| @@ -51,6 +51,7 @@ require ( | |||||
| github.com/go-enry/go-enry/v2 v2.3.0 | github.com/go-enry/go-enry/v2 v2.3.0 | ||||
| github.com/go-git/go-billy/v5 v5.0.0 | github.com/go-git/go-billy/v5 v5.0.0 | ||||
| github.com/go-git/go-git/v5 v5.0.0 | github.com/go-git/go-git/v5 v5.0.0 | ||||
| github.com/go-http-utils/headers v0.0.0-20181008091004-fed159eddc2a | |||||
| github.com/go-ini/ini v1.56.0 // indirect | github.com/go-ini/ini v1.56.0 // indirect | ||||
| github.com/go-macaron/auth v0.0.0-20161228062157-884c0e6c9b92 | github.com/go-macaron/auth v0.0.0-20161228062157-884c0e6c9b92 | ||||
| github.com/go-openapi/jsonreference v0.19.3 // indirect | github.com/go-openapi/jsonreference v0.19.3 // indirect | ||||
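The `github.com/go-http-utils/headers` module added above is a small package of HTTP header-name constants. A minimal usage sketch follows; the handler and route are illustrative, not part of this changeset, and assume only the package's `ContentType` constant.

```go
package main

import (
	"fmt"
	"net/http"

	"github.com/go-http-utils/headers"
)

// Illustrative handler: the only point is referencing the header name through
// the headers constant instead of a bare "Content-Type" string literal.
func pingHandler(w http.ResponseWriter, r *http.Request) {
	w.Header().Set(headers.ContentType, "application/json")
	fmt.Fprint(w, `{"ok":true}`)
}

func main() {
	http.HandleFunc("/ping", pingHandler)
	_ = http.ListenAndServe(":8080", nil) // error ignored in this sketch
}
```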
| @@ -61,6 +62,7 @@ require ( | |||||
| github.com/gobwas/glob v0.2.3 | github.com/gobwas/glob v0.2.3 | ||||
| github.com/gogs/chardet v0.0.0-20191104214054-4b6791f73a28 | github.com/gogs/chardet v0.0.0-20191104214054-4b6791f73a28 | ||||
| github.com/gogs/cron v0.0.0-20171120032916-9f6c956d3e14 | github.com/gogs/cron v0.0.0-20171120032916-9f6c956d3e14 | ||||
| github.com/golang/mock v1.6.0 // indirect | |||||
| github.com/golang/protobuf v1.4.1 // indirect | github.com/golang/protobuf v1.4.1 // indirect | ||||
| github.com/gomodule/redigo v2.0.0+incompatible | github.com/gomodule/redigo v2.0.0+incompatible | ||||
| github.com/google/go-github/v24 v24.0.1 | github.com/google/go-github/v24 v24.0.1 | ||||
| @@ -105,7 +107,6 @@ require ( | |||||
| github.com/prometheus/procfs v0.0.4 // indirect | github.com/prometheus/procfs v0.0.4 // indirect | ||||
| github.com/quasoft/websspi v1.0.0 | github.com/quasoft/websspi v1.0.0 | ||||
| github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001 // indirect | github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001 // indirect | ||||
| github.com/robfig/cron/v3 v3.0.1 | |||||
| github.com/satori/go.uuid v1.2.0 | github.com/satori/go.uuid v1.2.0 | ||||
| github.com/sergi/go-diff v1.1.0 | github.com/sergi/go-diff v1.1.0 | ||||
| github.com/shurcooL/httpfs v0.0.0-20190527155220-6a4d4a70508b // indirect | github.com/shurcooL/httpfs v0.0.0-20190527155220-6a4d4a70508b // indirect | ||||
| @@ -125,13 +126,12 @@ require ( | |||||
| github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594 | github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594 | ||||
| github.com/yuin/goldmark-meta v1.1.0 | github.com/yuin/goldmark-meta v1.1.0 | ||||
| golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37 | golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37 | ||||
| golang.org/x/mod v0.3.0 // indirect | |||||
| golang.org/x/net v0.0.0-20200513185701-a91f0712d120 | |||||
| golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4 | |||||
| golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d | golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d | ||||
| golang.org/x/sys v0.0.0-20200509044756-6aff5f38e54f | |||||
| golang.org/x/text v0.3.2 | |||||
| golang.org/x/sys v0.0.0-20210510120138-977fb7262007 | |||||
| golang.org/x/text v0.3.3 | |||||
| golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1 // indirect | golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1 // indirect | ||||
| golang.org/x/tools v0.0.0-20200515220128-d3bf790afa53 | |||||
| golang.org/x/tools v0.1.1 | |||||
| google.golang.org/appengine v1.6.5 // indirect | google.golang.org/appengine v1.6.5 // indirect | ||||
| gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect | gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect | ||||
| gopkg.in/asn1-ber.v1 v1.0.0-20150924051756-4e86f4367175 // indirect | gopkg.in/asn1-ber.v1 v1.0.0-20150924051756-4e86f4367175 // indirect | ||||
| @@ -262,6 +262,8 @@ github.com/go-git/go-git-fixtures/v4 v4.0.1 h1:q+IFMfLx200Q3scvt2hN79JsEzy4AmBTp | |||||
| github.com/go-git/go-git-fixtures/v4 v4.0.1/go.mod h1:m+ICp2rF3jDhFgEZ/8yziagdT1C+ZpZcrJjappBCDSw= | github.com/go-git/go-git-fixtures/v4 v4.0.1/go.mod h1:m+ICp2rF3jDhFgEZ/8yziagdT1C+ZpZcrJjappBCDSw= | ||||
| github.com/go-git/go-git/v5 v5.0.0 h1:k5RWPm4iJwYtfWoxIJy4wJX9ON7ihPeZZYC1fLYDnpg= | github.com/go-git/go-git/v5 v5.0.0 h1:k5RWPm4iJwYtfWoxIJy4wJX9ON7ihPeZZYC1fLYDnpg= | ||||
| github.com/go-git/go-git/v5 v5.0.0/go.mod h1:oYD8y9kWsGINPFJoLdaScGCN6dlKg23blmClfZwtUVA= | github.com/go-git/go-git/v5 v5.0.0/go.mod h1:oYD8y9kWsGINPFJoLdaScGCN6dlKg23blmClfZwtUVA= | ||||
| github.com/go-http-utils/headers v0.0.0-20181008091004-fed159eddc2a h1:v6zMvHuY9yue4+QkG/HQ/W67wvtQmWJ4SDo9aK/GIno= | |||||
| github.com/go-http-utils/headers v0.0.0-20181008091004-fed159eddc2a/go.mod h1:I79BieaU4fxrw4LMXby6q5OS9XnoR9UIKLOzDFjUmuw= | |||||
| github.com/go-ini/ini v1.56.0 h1:6HjxSjqdmgnujDPhlzR4a44lxK3w03WPN8te0SoUSeM= | github.com/go-ini/ini v1.56.0 h1:6HjxSjqdmgnujDPhlzR4a44lxK3w03WPN8te0SoUSeM= | ||||
| github.com/go-ini/ini v1.56.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= | github.com/go-ini/ini v1.56.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= | ||||
| github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= | github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= | ||||
| @@ -358,7 +360,10 @@ github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4er | |||||
| github.com/golang/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E= | github.com/golang/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E= | ||||
| github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= | github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= | ||||
| github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= | github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= | ||||
| github.com/golang/mock v1.3.1 h1:qGJ6qTW+x6xX/my+8YUVl4WNpX9B7+/l2tRsHGZ7f2s= | |||||
| github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= | github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= | ||||
| github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= | |||||
| github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= | |||||
| github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= | github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= | ||||
| github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= | github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= | ||||
| github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= | github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= | ||||
| @@ -404,8 +409,8 @@ github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5m | |||||
| github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= | github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= | ||||
| github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= | github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= | ||||
| github.com/gopherjs/gopherjs v0.0.0-20190430165422-3e4dfb77656c/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= | github.com/gopherjs/gopherjs v0.0.0-20190430165422-3e4dfb77656c/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= | ||||
| github.com/gopherjs/gopherjs v0.0.0-20190910122728-9d188e94fb99 h1:twflg0XRTjwKpxb/jFExr4HGq6on2dEOmnL6FV+fgPw= | |||||
| github.com/gopherjs/gopherjs v0.0.0-20190910122728-9d188e94fb99/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= | github.com/gopherjs/gopherjs v0.0.0-20190910122728-9d188e94fb99/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= | ||||
| github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00 h1:l5lAOZEym3oK3SQ2HBHWsJUfbNBiTXJDeW2QDxw9AQ0= | |||||
| github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= | github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= | ||||
| github.com/gorilla/context v1.1.1 h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8= | github.com/gorilla/context v1.1.1 h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8= | ||||
| github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= | github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= | ||||
| @@ -468,7 +473,6 @@ github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqx | |||||
| github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= | github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= | ||||
| github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= | github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= | ||||
| github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= | github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= | ||||
| github.com/json-iterator/go v1.1.9 h1:9yzud/Ht36ygwatGx56VwCZtlI/2AD15T1X2sjSuGns= | |||||
| github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= | github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= | ||||
| github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68= | github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68= | ||||
| github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= | github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= | ||||
| @@ -662,8 +666,6 @@ github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqn | |||||
| github.com/rcrowley/go-metrics v0.0.0-20190826022208-cac0b30c2563/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= | github.com/rcrowley/go-metrics v0.0.0-20190826022208-cac0b30c2563/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= | ||||
| github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001 h1:YDeskXpkNDhPdWN3REluVa46HQOVuVkjkd2sWnrABNQ= | github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001 h1:YDeskXpkNDhPdWN3REluVa46HQOVuVkjkd2sWnrABNQ= | ||||
| github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= | github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= | ||||
| github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= | |||||
| github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= | |||||
| github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= | github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= | ||||
| github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo= | github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo= | ||||
| github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= | github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= | ||||
| @@ -711,14 +713,12 @@ github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPx | |||||
| github.com/sirupsen/logrus v1.5.0/go.mod h1:+F7Ogzej0PZc/94MaYx/nvG9jOFMD2osvC3s+Squfpo= | github.com/sirupsen/logrus v1.5.0/go.mod h1:+F7Ogzej0PZc/94MaYx/nvG9jOFMD2osvC3s+Squfpo= | ||||
| github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= | github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= | ||||
| github.com/smartystreets/assertions v0.0.0-20190116191733-b6c0e53d7304/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= | github.com/smartystreets/assertions v0.0.0-20190116191733-b6c0e53d7304/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= | ||||
| github.com/smartystreets/assertions v1.0.1 h1:voD4ITNjPL5jjBfgR/r8fPIIBrliWrWHeiJApdr3r4w= | |||||
| github.com/smartystreets/assertions v1.0.1/go.mod h1:kHHU4qYBaI3q23Pp3VPrmWhuIUrLW/7eUrw0BU5VaoM= | github.com/smartystreets/assertions v1.0.1/go.mod h1:kHHU4qYBaI3q23Pp3VPrmWhuIUrLW/7eUrw0BU5VaoM= | ||||
| github.com/smartystreets/assertions v1.1.0 h1:MkTeG1DMwsrdH7QtLXy5W+fUxWq+vmb6cLmyJ7aRtF0= | github.com/smartystreets/assertions v1.1.0 h1:MkTeG1DMwsrdH7QtLXy5W+fUxWq+vmb6cLmyJ7aRtF0= | ||||
| github.com/smartystreets/assertions v1.1.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= | github.com/smartystreets/assertions v1.1.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= | ||||
| github.com/smartystreets/go-aws-auth v0.0.0-20180515143844-0c1422d1fdb9/go.mod h1:SnhjPscd9TpLiy1LpzGSKh3bXCfxxXuqd9xmQJy3slM= | github.com/smartystreets/go-aws-auth v0.0.0-20180515143844-0c1422d1fdb9/go.mod h1:SnhjPscd9TpLiy1LpzGSKh3bXCfxxXuqd9xmQJy3slM= | ||||
| github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s= | github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s= | ||||
| github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= | github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= | ||||
| github.com/smartystreets/goconvey v0.0.0-20190731233626-505e41936337 h1:WN9BUFbdyOsSH/XohnWpXOlq9NBD5sGAB2FciQMUEe8= | |||||
| github.com/smartystreets/goconvey v0.0.0-20190731233626-505e41936337/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= | github.com/smartystreets/goconvey v0.0.0-20190731233626-505e41936337/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= | ||||
| github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= | github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= | ||||
| github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= | github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= | ||||
| @@ -749,7 +749,6 @@ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ | |||||
| github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= | github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= | ||||
| github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= | github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= | ||||
| github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= | github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= | ||||
| github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= | |||||
| github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= | github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= | ||||
| github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= | github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= | ||||
| github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= | github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= | ||||
| @@ -804,20 +803,16 @@ github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q | |||||
| github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= | github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= | ||||
| github.com/yohcop/openid-go v1.0.0 h1:EciJ7ZLETHR3wOtxBvKXx9RV6eyHZpCaSZ1inbBaUXE= | github.com/yohcop/openid-go v1.0.0 h1:EciJ7ZLETHR3wOtxBvKXx9RV6eyHZpCaSZ1inbBaUXE= | ||||
| github.com/yohcop/openid-go v1.0.0/go.mod h1:/408xiwkeItSPJZSTPF7+VtZxPkPrRRpRNK2vjGh6yI= | github.com/yohcop/openid-go v1.0.0/go.mod h1:/408xiwkeItSPJZSTPF7+VtZxPkPrRRpRNK2vjGh6yI= | ||||
| github.com/yuin/goldmark v1.1.7/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= | |||||
| github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= | github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= | ||||
| github.com/yuin/goldmark v1.1.27 h1:nqDD4MMMQA0lmWq03Z2/myGPYLQoXtmi0rGVs95ntbo= | |||||
| github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= | github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= | ||||
| github.com/yuin/goldmark v1.1.30 h1:j4d4Lw3zqZelDhBksEo3BnWg9xhXRQGJPPSL6OApZjI= | |||||
| github.com/yuin/goldmark v1.1.30/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= | github.com/yuin/goldmark v1.1.30/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= | ||||
| github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= | |||||
| github.com/yuin/goldmark v1.4.5/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg= | github.com/yuin/goldmark v1.4.5/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg= | ||||
| github.com/yuin/goldmark v1.4.6/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg= | github.com/yuin/goldmark v1.4.6/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg= | ||||
| github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE= | github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE= | ||||
| github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= | github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= | ||||
| github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594 h1:yHfZyN55+5dp1wG7wDKv8HQ044moxkyGq12KFFMFDxg= | github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594 h1:yHfZyN55+5dp1wG7wDKv8HQ044moxkyGq12KFFMFDxg= | ||||
| github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594/go.mod h1:U9ihbh+1ZN7fR5Se3daSPoz1CGF9IYtSvWwVQtnzGHU= | github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594/go.mod h1:U9ihbh+1ZN7fR5Se3daSPoz1CGF9IYtSvWwVQtnzGHU= | ||||
| github.com/yuin/goldmark-meta v0.0.0-20191126180153-f0638e958b60 h1:gZucqLjL1eDzVWrXj4uiWeMbAopJlBR2mKQAsTGdPwo= | |||||
| github.com/yuin/goldmark-meta v0.0.0-20191126180153-f0638e958b60/go.mod h1:i9VhcIHN2PxXMbQrKqXNueok6QNONoPjNMoj9MygVL0= | |||||
| github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc= | github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc= | ||||
| github.com/yuin/goldmark-meta v1.1.0/go.mod h1:U4spWENafuA7Zyg+Lj5RqK/MF+ovMYtBvXi1lBb2VP0= | github.com/yuin/goldmark-meta v1.1.0/go.mod h1:U4spWENafuA7Zyg+Lj5RqK/MF+ovMYtBvXi1lBb2VP0= | ||||
| github.com/ziutek/mymysql v1.5.4 h1:GB0qdRGsTwQSBVYuVShFBKaXSnSnYYC2d9knnE1LHFs= | github.com/ziutek/mymysql v1.5.4 h1:GB0qdRGsTwQSBVYuVShFBKaXSnSnYYC2d9knnE1LHFs= | ||||
| @@ -859,14 +854,11 @@ golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad/go.mod h1:yigFU9vqHzYiE8U | |||||
| golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | ||||
| golang.org/x/crypto v0.0.0-20191219195013-becbf705a915/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | golang.org/x/crypto v0.0.0-20191219195013-becbf705a915/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | ||||
| golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | ||||
| golang.org/x/crypto v0.0.0-20200429183012-4b2356b1ed79 h1:IaQbIIB2X/Mp/DKctl6ROxz1KyMlKp4uyvL6+kQ7C88= | |||||
| golang.org/x/crypto v0.0.0-20200429183012-4b2356b1ed79/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | |||||
| golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37 h1:cg5LA/zNPRzIXIWSCxQW10Rvpy94aQh3LT/ShoCpkHw= | golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37 h1:cg5LA/zNPRzIXIWSCxQW10Rvpy94aQh3LT/ShoCpkHw= | ||||
| golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | ||||
| golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= | golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= | ||||
| golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= | golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= | ||||
| golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= | golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= | ||||
| golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a h1:gHevYm0pO4QUbwy8Dmdr01R5r1BuKtfYqRqF0h/Cbh0= | |||||
| golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= | golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= | ||||
| golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8 h1:hVwzHzIUGRjiF7EcUjqNxk3NCfkPxbDKRdnNE1Rpg0U= | golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8 h1:hVwzHzIUGRjiF7EcUjqNxk3NCfkPxbDKRdnNE1Rpg0U= | ||||
| golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= | golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= | ||||
| @@ -882,6 +874,8 @@ golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzB | |||||
| golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= | golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= | ||||
| golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= | golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= | ||||
| golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= | golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= | ||||
| golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= | |||||
| golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= | |||||
| golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= | golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= | ||||
| golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= | golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= | ||||
| golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= | golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= | ||||
| @@ -913,6 +907,8 @@ golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLL | |||||
| golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= | golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= | ||||
| golang.org/x/net v0.0.0-20200513185701-a91f0712d120 h1:EZ3cVSzKOlJxAd8e8YAJ7no8nNypTxexh/YE/xW3ZEY= | golang.org/x/net v0.0.0-20200513185701-a91f0712d120 h1:EZ3cVSzKOlJxAd8e8YAJ7no8nNypTxexh/YE/xW3ZEY= | ||||
| golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= | golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= | ||||
| golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4 h1:4nGaVu0QrbjT/AK2PRLuQfQuh6DJve+pELhqTdAj3x0= | |||||
| golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= | |||||
| golang.org/x/oauth2 v0.0.0-20180620175406-ef147856a6dd/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= | golang.org/x/oauth2 v0.0.0-20180620175406-ef147856a6dd/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= | ||||
| golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= | golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= | ||||
| golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= | golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= | ||||
| @@ -929,10 +925,11 @@ golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJ | |||||
| golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||
| golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||
| golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||
| golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e h1:vcxGaoTs7kV8m5Np9uUNQin4BrLOthgV7252N8V+FwY= | |||||
| golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||
| golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a h1:WXEvlFVvvGxCJLG6REjsT03iWnKLEWinaScsxF2Vm2o= | golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a h1:WXEvlFVvvGxCJLG6REjsT03iWnKLEWinaScsxF2Vm2o= | ||||
| golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||
| golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= | |||||
| golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | |||||
| golang.org/x/sys v0.0.0-20180824143301-4910a1d54f87/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | golang.org/x/sys v0.0.0-20180824143301-4910a1d54f87/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | ||||
| golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | ||||
| golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | ||||
| @@ -967,10 +964,17 @@ golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7w | |||||
| golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | ||||
| golang.org/x/sys v0.0.0-20200509044756-6aff5f38e54f h1:mOhmO9WsBaJCNmaZHPtHs9wOcdqdKCjF6OPJlmDM3KI= | golang.org/x/sys v0.0.0-20200509044756-6aff5f38e54f h1:mOhmO9WsBaJCNmaZHPtHs9wOcdqdKCjF6OPJlmDM3KI= | ||||
| golang.org/x/sys v0.0.0-20200509044756-6aff5f38e54f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | golang.org/x/sys v0.0.0-20200509044756-6aff5f38e54f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | ||||
| golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | |||||
| golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | |||||
| golang.org/x/sys v0.0.0-20210510120138-977fb7262007 h1:gG67DSER+11cZvqIMb8S8bt0vZtiN6xWYARwirrOSfE= | |||||
| golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | |||||
| golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= | |||||
| golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | ||||
| golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | ||||
| golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= | golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= | ||||
| golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= | golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= | ||||
| golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k= | |||||
| golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | |||||
| golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= | golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= | ||||
| golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= | golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= | ||||
| golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= | golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= | ||||
| @@ -1001,10 +1005,14 @@ golang.org/x/tools v0.0.0-20200325010219-a49f79bcc224/go.mod h1:Sl4aGygMT6LrqrWc | |||||
| golang.org/x/tools v0.0.0-20200509030707-2212a7e161a5/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= | golang.org/x/tools v0.0.0-20200509030707-2212a7e161a5/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= | ||||
| golang.org/x/tools v0.0.0-20200515220128-d3bf790afa53 h1:vmsb6v0zUdmUlXfwKaYrHPPRCV0lHq/IwNIf0ASGjyQ= | golang.org/x/tools v0.0.0-20200515220128-d3bf790afa53 h1:vmsb6v0zUdmUlXfwKaYrHPPRCV0lHq/IwNIf0ASGjyQ= | ||||
| golang.org/x/tools v0.0.0-20200515220128-d3bf790afa53/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= | golang.org/x/tools v0.0.0-20200515220128-d3bf790afa53/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= | ||||
| golang.org/x/tools v0.1.1 h1:wGiQel/hW0NnEkJUk8lbzkX2gFJU6PFxf1v5OlCfuOs= | |||||
| golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= | |||||
| golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | ||||
| golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | ||||
| golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= | golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= | ||||
| golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | ||||
| golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= | |||||
| golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | |||||
| google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= | google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= | ||||
| google.golang.org/api v0.0.0-20181030000543-1d582fd0359e/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= | google.golang.org/api v0.0.0-20181030000543-1d582fd0359e/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= | ||||
| google.golang.org/api v0.0.0-20181220000619-583d854617af/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= | google.golang.org/api v0.0.0-20181220000619-583d854617af/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= | ||||
| @@ -1076,8 +1084,6 @@ gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= | |||||
| gopkg.in/ini.v1 v1.44.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= | gopkg.in/ini.v1 v1.44.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= | ||||
| gopkg.in/ini.v1 v1.44.2/go.mod h1:M3Cogqpuv0QCi3ExAY5V4uOt4qb/R3xZubo9m8lK5wg= | gopkg.in/ini.v1 v1.44.2/go.mod h1:M3Cogqpuv0QCi3ExAY5V4uOt4qb/R3xZubo9m8lK5wg= | ||||
| gopkg.in/ini.v1 v1.46.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= | gopkg.in/ini.v1 v1.46.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= | ||||
| gopkg.in/ini.v1 v1.52.0 h1:j+Lt/M1oPPejkniCg1TkWE2J3Eh1oZTsHSXzMTzUXn4= | |||||
| gopkg.in/ini.v1 v1.52.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= | |||||
| gopkg.in/ini.v1 v1.56.0 h1:DPMeDvGTM54DXbPkVIZsp19fp/I2K7zwA/itHYHKo8Y= | gopkg.in/ini.v1 v1.56.0 h1:DPMeDvGTM54DXbPkVIZsp19fp/I2K7zwA/itHYHKo8Y= | ||||
| gopkg.in/ini.v1 v1.56.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= | gopkg.in/ini.v1 v1.56.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= | ||||
| gopkg.in/ldap.v3 v3.0.2 h1:R6RBtabK6e1GO0eQKtkyOFbAHO73QesLzI2w2DZ6b9w= | gopkg.in/ldap.v3 v3.0.2 h1:R6RBtabK6e1GO0eQKtkyOFbAHO73QesLzI2w2DZ6b9w= | ||||
| @@ -1098,7 +1104,6 @@ gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bl | |||||
| gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= | gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= | ||||
| gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= | gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= | ||||
| gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= | gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= | ||||
| gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= | |||||
| gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= | gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= | ||||
| gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= | gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= | ||||
| gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= | gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= | ||||
| @@ -88,7 +88,7 @@ type AiModelQueryOptions struct { | |||||
| } | } | ||||
| func (a *AiModelConvert) IsGpuTrainTask() bool { | func (a *AiModelConvert) IsGpuTrainTask() bool { | ||||
| if a.SrcEngine == 0 || a.SrcEngine == 1 { | |||||
| if a.SrcEngine == 0 || a.SrcEngine == 1 || a.SrcEngine == 4 || a.SrcEngine == 6 { | |||||
| return true | return true | ||||
| } | } | ||||
| return false | return false | ||||
| @@ -116,6 +116,8 @@ const ( | |||||
| GrampusStatusStopped = "STOPPED" | GrampusStatusStopped = "STOPPED" | ||||
| GrampusStatusUnknown = "UNKNOWN" | GrampusStatusUnknown = "UNKNOWN" | ||||
| GrampusStatusWaiting = "WAITING" | GrampusStatusWaiting = "WAITING" | ||||
| ModelSuffix = "models.zip" | |||||
| ) | ) | ||||
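The new `ModelSuffix = "models.zip"` constant names the archive that holds a job's exported model. A tiny hypothetical helper shows the kind of object key it would typically be combined into; the package name and directory layout are assumptions, only the constant itself comes from this diff.

```go
package grampus // package name assumed

import "path"

// modelArchiveKey is hypothetical; only ModelSuffix ("models.zip") is real.
func modelArchiveKey(jobName string) string {
	return path.Join(jobName, "output", ModelSuffix) // e.g. "job123/output/models.zip"
}
```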
| const ( | const ( | ||||
| @@ -1070,6 +1072,12 @@ type CreateInferenceJobParams struct { | |||||
| InfConfig InfConfig `json:"config"` | InfConfig InfConfig `json:"config"` | ||||
| WorkspaceID string `json:"workspace_id"` | WorkspaceID string `json:"workspace_id"` | ||||
| } | } | ||||
| type CreateInfUserImageParams struct { | |||||
| JobName string `json:"job_name"` | |||||
| Description string `json:"job_desc"` | |||||
| Config InfUserImageConfig `json:"config"` | |||||
| WorkspaceID string `json:"workspace_id"` | |||||
| } | |||||
| type InfConfig struct { | type InfConfig struct { | ||||
| WorkServerNum int `json:"worker_server_num"` | WorkServerNum int `json:"worker_server_num"` | ||||
| @@ -1084,6 +1092,21 @@ type InfConfig struct { | |||||
| PoolID string `json:"pool_id"` | PoolID string `json:"pool_id"` | ||||
| } | } | ||||
| type InfUserImageConfig struct { | |||||
| WorkServerNum int `json:"worker_server_num"` | |||||
| AppUrl string `json:"app_url"` //code directory of the training job | |||||
| BootFileUrl string `json:"boot_file_url"` //startup file of the training job; it must be inside the code directory | |||||
| Parameter []Parameter `json:"parameter"` | |||||
| DataUrl string `json:"data_url"` //OBS path URL of the dataset required by the training job | |||||
| EngineID int64 `json:"engine_id"` | |||||
| LogUrl string `json:"log_url"` | |||||
| CreateVersion bool `json:"create_version"` | |||||
| Flavor Flavor `json:"flavor"` | |||||
| PoolID string `json:"pool_id"` | |||||
| UserImageUrl string `json:"user_image_url"` | |||||
| UserCommand string `json:"user_command"` | |||||
| } | |||||
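`CreateInfUserImageParams` and `InfUserImageConfig` describe the request body for launching a ModelArts inference job from a user-supplied image. A hedged sketch of building and serializing that payload follows; the reduced `Flavor` and `Parameter` mirrors and every field value are illustrative assumptions, only the field names and JSON tags come from the structs above.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Reduced mirrors so the snippet is self-contained; the real repo types may
// carry more fields.
type Flavor struct {
	Code string `json:"code"`
}

type InfUserImageConfig struct {
	WorkServerNum int    `json:"worker_server_num"`
	EngineID      int64  `json:"engine_id"`
	Flavor        Flavor `json:"flavor"`
	PoolID        string `json:"pool_id"`
	UserImageUrl  string `json:"user_image_url"`
	UserCommand   string `json:"user_command"`
}

type CreateInfUserImageParams struct {
	JobName     string             `json:"job_name"`
	Description string             `json:"job_desc"`
	Config      InfUserImageConfig `json:"config"`
	WorkspaceID string             `json:"workspace_id"`
}

func main() {
	// All values below are placeholders, not taken from the changeset.
	params := CreateInfUserImageParams{
		JobName:     "inf-job-demo",
		Description: "inference job started from a user image",
		WorkspaceID: "0",
		Config: InfUserImageConfig{
			WorkServerNum: 1,
			EngineID:      -1, // assumption: custom-image jobs use no built-in engine
			UserImageUrl:  "swr.example.com/org/infer:latest",
			UserCommand:   "python /home/work/serve.py",
			Flavor:        Flavor{Code: "modelarts.vm.gpu.1"},
		},
	}
	body, _ := json.MarshalIndent(params, "", "  ")
	fmt.Println(string(body))
}
```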
| type CreateTrainJobVersionParams struct { | type CreateTrainJobVersionParams struct { | ||||
| Description string `json:"job_desc"` | Description string `json:"job_desc"` | ||||
| Config TrainJobVersionConfig `json:"config"` | Config TrainJobVersionConfig `json:"config"` | ||||
| @@ -2024,7 +2047,7 @@ func GetCloudbrainRunCountByRepoID(repoID int64) (int, error) { | |||||
| } | } | ||||
| func GetModelSafetyCountByUserID(userID int64) (int, error) { | func GetModelSafetyCountByUserID(userID int64) (int, error) { | ||||
| count, err := x.In("status", JobWaiting, JobRunning,ModelArtsTrainJobInit,ModelArtsTrainJobImageCreating,ModelArtsTrainJobSubmitTrying,ModelArtsTrainJobScaling,ModelArtsTrainJobCheckInit,ModelArtsTrainJobCheckRunning,ModelArtsTrainJobCheckRunningCompleted).And("job_type = ? and user_id = ?", string(JobTypeModelSafety), userID).Count(new(Cloudbrain)) | |||||
| count, err := x.In("status", JobWaiting, JobRunning, ModelArtsTrainJobInit, ModelArtsTrainJobImageCreating, ModelArtsTrainJobSubmitTrying, ModelArtsTrainJobScaling, ModelArtsTrainJobCheckInit, ModelArtsTrainJobCheckRunning, ModelArtsTrainJobCheckRunningCompleted).And("job_type = ? and user_id = ?", string(JobTypeModelSafety), userID).Count(new(Cloudbrain)) | |||||
| return int(count), err | return int(count), err | ||||
| } | } | ||||
| @@ -2260,9 +2283,9 @@ func CloudbrainAllStatic(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, er | |||||
| } | } | ||||
| sess.Limit(opts.PageSize, start) | sess.Limit(opts.PageSize, start) | ||||
| } | } | ||||
| sess.OrderBy("cloudbrain.created_unix DESC") | |||||
| // sess.OrderBy("cloudbrain.created_unix DESC") | |||||
| cloudbrains := make([]*CloudbrainInfo, 0, setting.UI.IssuePagingNum) | cloudbrains := make([]*CloudbrainInfo, 0, setting.UI.IssuePagingNum) | ||||
| if err := sess.Table(&Cloudbrain{}).Unscoped().Where(cond). | |||||
| if err := sess.Cols("status", "type", "job_type", "train_job_duration", "duration", "compute_resource", "created_unix", "start_time", "end_time", "work_server_number").Table(&Cloudbrain{}).Unscoped().Where(cond). | |||||
| Find(&cloudbrains); err != nil { | Find(&cloudbrains); err != nil { | ||||
| return nil, 0, fmt.Errorf("Find: %v", err) | return nil, 0, fmt.Errorf("Find: %v", err) | ||||
| } | } | ||||
| @@ -9,7 +9,7 @@ type CloudbrainSpec struct { | |||||
| SpecId int64 `xorm:"index"` | SpecId int64 `xorm:"index"` | ||||
| SourceSpecId string | SourceSpecId string | ||||
| AccCardsNum int | AccCardsNum int | ||||
| AccCardType string | |||||
| AccCardType string `xorm:"index"` | |||||
| CpuCores int | CpuCores int | ||||
| MemGiB float32 | MemGiB float32 | ||||
| GPUMemGiB float32 | GPUMemGiB float32 | ||||
| @@ -19,7 +19,7 @@ type CloudbrainSpec struct { | |||||
| QueueId int64 | QueueId int64 | ||||
| QueueCode string | QueueCode string | ||||
| Cluster string | Cluster string | ||||
| AiCenterCode string | |||||
| AiCenterCode string `xorm:"index"` | |||||
| AiCenterName string | AiCenterName string | ||||
| IsExclusive bool | IsExclusive bool | ||||
| ExclusiveOrg string | ExclusiveOrg string | ||||
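These two hunks only add `xorm:"index"` tags to `AccCardType` and `AiCenterCode`, so queries that filter on those columns (such as the duration statistics added later in this diff) can use secondary indexes. With xorm the indexes are created when the model is synced; a minimal sketch follows, with a reduced copy of the struct and assumed import path and connection details.

```go
package main

import (
	"log"

	_ "github.com/lib/pq"
	"xorm.io/xorm"
)

// Reduced copy of the model; only the two newly tagged fields are shown.
type CloudbrainSpec struct {
	ID           int64  `xorm:"pk autoincr"`
	AccCardType  string `xorm:"index"`
	AiCenterCode string `xorm:"index"`
}

func main() {
	engine, err := xorm.NewEngine("postgres", "postgres://user:pass@localhost/gitea?sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	// Sync2 creates the table if needed and adds the secondary indexes
	// implied by the `index` tags on AccCardType and AiCenterCode.
	if err := engine.Sync2(new(CloudbrainSpec)); err != nil {
		log.Fatal(err)
	}
}
```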
| @@ -1,6 +1,7 @@ | |||||
| package models | package models | ||||
| import ( | import ( | ||||
| "fmt" | |||||
| "strconv" | "strconv" | ||||
| "time" | "time" | ||||
| @@ -38,6 +39,60 @@ type TaskDetail struct { | |||||
| Spec *Specification `json:"Spec"` | Spec *Specification `json:"Spec"` | ||||
| } | } | ||||
| type CloudbrainDurationStatistic struct { | |||||
| ID int64 `xorm:"pk autoincr"` | |||||
| Cluster string | |||||
| AiCenterCode string | |||||
| AiCenterName string | |||||
| ComputeResource string | |||||
| AccCardType string | |||||
| DateTime string | |||||
| DayTime string | |||||
| HourTime int | |||||
| CardsUseDuration int | |||||
| CardsTotalDuration int | |||||
| CardsTotalNum int | |||||
| DeletedUnix timeutil.TimeStamp `xorm:"deleted"` | |||||
| CreatedUnix timeutil.TimeStamp `xorm:"created"` | |||||
| UpdatedUnix timeutil.TimeStamp `xorm:"updated"` | |||||
| } | |||||
| type DurationStatisticOptions struct { | |||||
| BeginTime time.Time | |||||
| EndTime time.Time | |||||
| AiCenterCode string | |||||
| } | |||||
| type DurationRateStatistic struct { | |||||
| AiCenterTotalDurationStat map[string]int `json:"aiCenterTotalDurationStat"` | |||||
| AiCenterUsageDurationStat map[string]int `json:"aiCenterUsageDurationStat"` | |||||
| UsageRate map[string]float64 `json:"UsageRate"` | |||||
| } | |||||
| type ResourceDetail struct { | |||||
| QueueCode string | |||||
| Cluster string `xorm:"notnull"` | |||||
| AiCenterCode string | |||||
| AiCenterName string | |||||
| ComputeResource string | |||||
| AccCardType string | |||||
| CardsTotalNum int | |||||
| IsAutomaticSync bool | |||||
| } | |||||
| type DateUsageStatistic struct { | |||||
| Date string `json:"date"` | |||||
| UsageDuration int `json:"usageDuration"` | |||||
| TotalDuration int `json:"totalDuration"` | |||||
| UsageRate float64 `json:"usageRate"` | |||||
| } | |||||
| type HourTimeStatistic struct { | |||||
| HourTimeUsageDuration map[string]int `json:"hourTimeUsageDuration"` | |||||
| HourTimeTotalDuration map[string]int `json:"hourTimeTotalDuration"` | |||||
| HourTimeUsageRate map[string]float64 `json:"hourTimeUsageRate"` | |||||
| } | |||||
| func GetTodayCreatorCount(beginTime time.Time, endTime time.Time) (int64, error) { | func GetTodayCreatorCount(beginTime time.Time, endTime time.Time) (int64, error) { | ||||
| countSql := "SELECT count(distinct user_id) FROM " + | countSql := "SELECT count(distinct user_id) FROM " + | ||||
| "public.cloudbrain where created_unix >=" + strconv.FormatInt(beginTime.Unix(), 10) + | "public.cloudbrain where created_unix >=" + strconv.FormatInt(beginTime.Unix(), 10) + | ||||
| @@ -199,3 +254,121 @@ func GetRunHourPeriodCount(dateBeginTime string, dateEndTime string) (map[string | |||||
| } | } | ||||
| return dateHourMap, nil | return dateHourMap, nil | ||||
| } | } | ||||
| func GetCloudbrainRunning() ([]*CloudbrainInfo, error) { | |||||
| sess := x.NewSession() | |||||
| defer sess.Close() | |||||
| var cond = builder.NewCond() | |||||
| cond = cond.And( | |||||
| builder.Eq{"cloudbrain.status": string(JobRunning)}, | |||||
| ) | |||||
| sess.OrderBy("cloudbrain.created_unix ASC") | |||||
| cloudbrains := make([]*CloudbrainInfo, 0, 10) | |||||
| if err := sess.Table(&Cloudbrain{}).Where(cond). | |||||
| Find(&cloudbrains); err != nil { | |||||
| log.Info("find error.") | |||||
| } | |||||
| return cloudbrains, nil | |||||
| } | |||||
| func GetCloudbrainByTime(beginTime int64, endTime int64) ([]*CloudbrainInfo, error) { | |||||
| sess := x.NewSession() | |||||
| defer sess.Close() | |||||
| var cond = builder.NewCond() | |||||
| cond = cond.And( | |||||
| builder.And(builder.Gte{"cloudbrain.end_time": beginTime}, builder.Lte{"cloudbrain.end_time": endTime}), | |||||
| ) | |||||
| cond = cond.Or( | |||||
| builder.Eq{"cloudbrain.status": string(JobRunning)}, | |||||
| ) | |||||
| sess.OrderBy("cloudbrain.created_unix ASC") | |||||
| cloudbrains := make([]*CloudbrainInfo, 0, 10) | |||||
| if err := sess.Table(&Cloudbrain{}).Unscoped().Where(cond). | |||||
| Find(&cloudbrains); err != nil { | |||||
| log.Info("find error.") | |||||
| } | |||||
| return cloudbrains, nil | |||||
| } | |||||
| func GetSpecByAiCenterCodeAndType(aiCenterCode string, accCardType string) ([]*CloudbrainSpec, error) { | |||||
| sess := x.NewSession() | |||||
| defer sess.Close() | |||||
| var cond = builder.NewCond() | |||||
| cond = cond.And( | |||||
| builder.And(builder.Eq{"cloudbrain_spec.ai_center_code": aiCenterCode}, builder.Eq{"cloudbrain_spec.acc_card_type": accCardType}), | |||||
| ) | |||||
| cloudbrainSpecs := make([]*CloudbrainSpec, 0, 10) | |||||
| if err := sess.Table(&CloudbrainSpec{}).Where(cond). | |||||
| Find(&cloudbrainSpecs); err != nil { | |||||
| log.Info("find error.") | |||||
| } | |||||
| return cloudbrainSpecs, nil | |||||
| } | |||||
| func InsertCloudbrainDurationStatistic(cloudbrainDurationStatistic *CloudbrainDurationStatistic) (int64, error) { | |||||
| return xStatistic.Insert(cloudbrainDurationStatistic) | |||||
| } | |||||
| func DeleteCloudbrainDurationStatisticHour(date string, hour int, aiCenterCode string, accCardType string) error { | |||||
| sess := xStatistic.NewSession() | |||||
| defer sess.Close() | |||||
| if err := sess.Begin(); err != nil { | |||||
| return fmt.Errorf("Begin: %v", err) | |||||
| } | |||||
| if _, err := sess.Where("day_time = ? AND hour_time = ? AND ai_center_code = ? AND acc_card_type = ?", date, hour, aiCenterCode, accCardType).Delete(&CloudbrainDurationStatistic{}); err != nil { | |||||
| return fmt.Errorf("Delete: %v", err) | |||||
| } | |||||
| if err := sess.Commit(); err != nil { | |||||
| sess.Close() | |||||
| return fmt.Errorf("Commit: %v", err) | |||||
| } | |||||
| sess.Close() | |||||
| return nil | |||||
| } | |||||
| func GetCanUseCardInfo() ([]*ResourceQueue, error) { | |||||
| sess := x.NewSession() | |||||
| defer sess.Close() | |||||
| sess.OrderBy("resource_queue.id ASC") | |||||
| ResourceQueues := make([]*ResourceQueue, 0, 10) | |||||
| if err := sess.Table(&ResourceQueue{}).Find(&ResourceQueues); err != nil { | |||||
| log.Info("find error.") | |||||
| } | |||||
| return ResourceQueues, nil | |||||
| } | |||||
| func GetCardDurationStatistics(opts *DurationStatisticOptions) ([]*CloudbrainDurationStatistic, error) { | |||||
| sess := xStatistic.NewSession() | |||||
| defer sess.Close() | |||||
| var cond = builder.NewCond() | |||||
| if opts.BeginTime.Unix() > 0 && opts.EndTime.Unix() > 0 { | |||||
| cond = cond.And( | |||||
| builder.And(builder.Gte{"cloudbrain_duration_statistic.created_unix": opts.BeginTime.Unix()}, builder.Lte{"cloudbrain_duration_statistic.created_unix": opts.EndTime.Unix()}), | |||||
| ) | |||||
| } | |||||
| if opts.AiCenterCode != "" { | |||||
| cond = cond.And( | |||||
| builder.Eq{"cloudbrain_duration_statistic.ai_center_code": opts.AiCenterCode}, | |||||
| ) | |||||
| } | |||||
| CloudbrainDurationStatistics := make([]*CloudbrainDurationStatistic, 0, 10) | |||||
| if err := sess.Table(&CloudbrainDurationStatistic{}).Where(cond). | |||||
| Find(&CloudbrainDurationStatistics); err != nil { | |||||
| log.Info("find error.") | |||||
| } | |||||
| return CloudbrainDurationStatistics, nil | |||||
| } | |||||
| func GetDurationRecordBeginTime() ([]*CloudbrainDurationStatistic, error) { | |||||
| sess := xStatistic.NewSession() | |||||
| defer sess.Close() | |||||
| sess.OrderBy("cloudbrain_duration_statistic.id ASC limit 1") | |||||
| CloudbrainDurationStatistics := make([]*CloudbrainDurationStatistic, 0) | |||||
| if err := sess.Table(&CloudbrainDurationStatistic{}).Find(&CloudbrainDurationStatistics); err != nil { | |||||
| log.Info("find error.") | |||||
| } | |||||
| return CloudbrainDurationStatistics, nil | |||||
| } | |||||
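The helpers above are raw queries; a hedged sketch of how a caller might combine `GetCardDurationStatistics` with the `DurationRateStatistic` type to get per-center usage rates for one day. The one-day window and the rate formula (used duration divided by total duration) are assumptions; only the types and the helper come from this diff.

```go
// Assumed to live in the models package next to the helpers above; "time" is
// already imported in that file. The aggregation itself is an assumption.
func dayUsageRate(aiCenterCode string, day time.Time) (*DurationRateStatistic, error) {
	begin := time.Date(day.Year(), day.Month(), day.Day(), 0, 0, 0, 0, day.Location())
	end := begin.Add(24 * time.Hour)

	records, err := GetCardDurationStatistics(&DurationStatisticOptions{
		BeginTime:    begin,
		EndTime:      end,
		AiCenterCode: aiCenterCode,
	})
	if err != nil {
		return nil, err
	}

	stat := &DurationRateStatistic{
		AiCenterTotalDurationStat: make(map[string]int),
		AiCenterUsageDurationStat: make(map[string]int),
		UsageRate:                 make(map[string]float64),
	}
	for _, r := range records {
		stat.AiCenterUsageDurationStat[r.AiCenterCode] += r.CardsUseDuration
		stat.AiCenterTotalDurationStat[r.AiCenterCode] += r.CardsTotalDuration
	}
	for code, total := range stat.AiCenterTotalDurationStat {
		if total > 0 {
			stat.UsageRate[code] = float64(stat.AiCenterUsageDurationStat[code]) / float64(total)
		}
	}
	return stat, nil
}
```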
| @@ -161,6 +161,7 @@ func init() { | |||||
| new(CloudbrainSpec), | new(CloudbrainSpec), | ||||
| new(CloudbrainTemp), | new(CloudbrainTemp), | ||||
| new(DatasetReference), | new(DatasetReference), | ||||
| new(ScheduleRecord), | |||||
| new(BadgeCategory), | new(BadgeCategory), | ||||
| new(Badge), | new(Badge), | ||||
| new(BadgeUser), | new(BadgeUser), | ||||
| @@ -183,6 +184,7 @@ func init() { | |||||
| new(UserMetrics), | new(UserMetrics), | ||||
| new(UserAnalysisPara), | new(UserAnalysisPara), | ||||
| new(Invitation), | new(Invitation), | ||||
| new(CloudbrainDurationStatistic), | |||||
| ) | ) | ||||
| gonicNames := []string{"SSL", "UID"} | gonicNames := []string{"SSL", "UID"} | ||||
| @@ -454,6 +454,7 @@ func (repo *Repository) innerAPIFormat(e Engine, mode AccessMode, isParent bool) | |||||
| AllowRebaseMerge: allowRebaseMerge, | AllowRebaseMerge: allowRebaseMerge, | ||||
| AllowSquash: allowSquash, | AllowSquash: allowSquash, | ||||
| AvatarURL: repo.avatarLink(e), | AvatarURL: repo.avatarLink(e), | ||||
| Status: int(repo.Status), | |||||
| } | } | ||||
| } | } | ||||
| @@ -249,22 +249,23 @@ type AdminRewardOperateReq struct { | |||||
| } | } | ||||
| type RewardOperateRecordShow struct { | type RewardOperateRecordShow struct { | ||||
| SerialNo string | |||||
| Status string | |||||
| OperateType string | |||||
| SourceId string | |||||
| Amount int64 | |||||
| LossAmount int64 | |||||
| BalanceAfter int64 | |||||
| Remark string | |||||
| SourceType string | |||||
| UserName string | |||||
| LastOperateDate timeutil.TimeStamp | |||||
| UnitPrice int64 | |||||
| SuccessCount int | |||||
| Action *ActionShow | |||||
| Cloudbrain *CloudbrainShow | |||||
| AdminLog *RewardAdminLogShow | |||||
| SerialNo string | |||||
| Status string | |||||
| OperateType string | |||||
| SourceId string | |||||
| Amount int64 | |||||
| LossAmount int64 | |||||
| BalanceAfter int64 | |||||
| Remark string | |||||
| SourceType string | |||||
| SourceTemplateId string | |||||
| UserName string | |||||
| LastOperateDate timeutil.TimeStamp | |||||
| UnitPrice int64 | |||||
| SuccessCount int | |||||
| Action *ActionShow | |||||
| Cloudbrain *CloudbrainShow | |||||
| AdminLog *RewardAdminLogShow | |||||
| } | } | ||||
| func getPointOperateRecord(tl *RewardOperateRecord) (*RewardOperateRecord, error) { | func getPointOperateRecord(tl *RewardOperateRecord) (*RewardOperateRecord, error) { | ||||
| @@ -419,7 +420,7 @@ func GetRewardRecordShowList(opts *RewardRecordListOpts) (RewardRecordShowList, | |||||
| r := make([]*RewardOperateRecordShow, 0) | r := make([]*RewardOperateRecordShow, 0) | ||||
| err = x.Table("reward_operate_record").Cols("reward_operate_record.source_id", "reward_operate_record.serial_no", | err = x.Table("reward_operate_record").Cols("reward_operate_record.source_id", "reward_operate_record.serial_no", | ||||
| "reward_operate_record.status", "reward_operate_record.operate_type", "reward_operate_record.amount", | "reward_operate_record.status", "reward_operate_record.operate_type", "reward_operate_record.amount", | ||||
| "reward_operate_record.loss_amount", "reward_operate_record.remark", "reward_operate_record.source_type", | |||||
| "reward_operate_record.loss_amount", "reward_operate_record.remark", "reward_operate_record.source_type", "reward_operate_record.source_template_id", | |||||
| "reward_operate_record.last_operate_unix as last_operate_date"). | "reward_operate_record.last_operate_unix as last_operate_date"). | ||||
| Where(cond).Limit(opts.PageSize, (opts.Page-1)*opts.PageSize).OrderBy(string(opts.OrderBy)).Find(&r) | Where(cond).Limit(opts.PageSize, (opts.Page-1)*opts.PageSize).OrderBy(string(opts.OrderBy)).Find(&r) | ||||
| @@ -441,7 +442,7 @@ func GetAdminRewardRecordShowList(opts *RewardRecordListOpts) (RewardRecordShowL | |||||
| case OperateTypeIncrease: | case OperateTypeIncrease: | ||||
| err = x.Table("reward_operate_record").Cols("reward_operate_record.source_id", "reward_operate_record.serial_no", | err = x.Table("reward_operate_record").Cols("reward_operate_record.source_id", "reward_operate_record.serial_no", | ||||
| "reward_operate_record.status", "reward_operate_record.operate_type", "reward_operate_record.amount", | "reward_operate_record.status", "reward_operate_record.operate_type", "reward_operate_record.amount", | ||||
| "reward_operate_record.loss_amount", "reward_operate_record.remark", "reward_operate_record.source_type", | |||||
| "reward_operate_record.loss_amount", "reward_operate_record.remark", "reward_operate_record.source_type", "reward_operate_record.source_template_id", | |||||
| "reward_operate_record.last_operate_unix as last_operate_date", "public.user.name as user_name", | "reward_operate_record.last_operate_unix as last_operate_date", "public.user.name as user_name", | ||||
| "point_account_log.balance_after"). | "point_account_log.balance_after"). | ||||
| Join("LEFT", "public.user", "reward_operate_record.user_id = public.user.id"). | Join("LEFT", "public.user", "reward_operate_record.user_id = public.user.id"). | ||||
| @@ -450,7 +451,7 @@ func GetAdminRewardRecordShowList(opts *RewardRecordListOpts) (RewardRecordShowL | |||||
| case OperateTypeDecrease: | case OperateTypeDecrease: | ||||
| err = x.Table("reward_operate_record").Cols("reward_operate_record.source_id", "reward_operate_record.serial_no", | err = x.Table("reward_operate_record").Cols("reward_operate_record.source_id", "reward_operate_record.serial_no", | ||||
| "reward_operate_record.status", "reward_operate_record.operate_type", "reward_operate_record.amount", | "reward_operate_record.status", "reward_operate_record.operate_type", "reward_operate_record.amount", | ||||
| "reward_operate_record.loss_amount", "reward_operate_record.remark", "reward_operate_record.source_type", | |||||
| "reward_operate_record.loss_amount", "reward_operate_record.remark", "reward_operate_record.source_type", "reward_operate_record.source_template_id", | |||||
| "reward_operate_record.last_operate_unix as last_operate_date", "public.user.name as user_name", | "reward_operate_record.last_operate_unix as last_operate_date", "public.user.name as user_name", | ||||
| "reward_periodic_task.amount as unit_price", "reward_periodic_task.success_count"). | "reward_periodic_task.amount as unit_price", "reward_periodic_task.success_count"). | ||||
| Join("LEFT", "public.user", "reward_operate_record.user_id = public.user.id"). | Join("LEFT", "public.user", "reward_operate_record.user_id = public.user.id"). | ||||
| @@ -0,0 +1,70 @@ | |||||
| package models | |||||
| import ( | |||||
| "fmt" | |||||
| "time" | |||||
| "code.gitea.io/gitea/modules/timeutil" | |||||
| ) | |||||
| const ( | |||||
| StorageScheduleSucceed int = iota | |||||
| StorageScheduleProcessing | |||||
| StorageScheduleFailed | |||||
| StorageNoFile | |||||
| StorageScheduleWaiting | |||||
| ) | |||||
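| // ScheduleRecord tracks a storage schedule task that transfers a cloudbrain job's output between storage endpoints. | |||||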
| type ScheduleRecord struct { | |||||
| ID int64 `xorm:"pk autoincr"` | |||||
| CloudbrainID int64 `xorm:"INDEX NOT NULL unique"` | |||||
| EndPoint string `xorm:"INDEX NOT NULL"` | |||||
| Bucket string `xorm:"INDEX NOT NULL"` | |||||
| ObjectKey string `xorm:"INDEX NOT NULL"` | |||||
| ProxyServer string `xorm:"INDEX NOT NULL"` | |||||
| Status int `xorm:"INDEX NOT NULL DEFAULT 0"` | |||||
| CreatedUnix timeutil.TimeStamp `xorm:"created"` | |||||
| UpdatedUnix timeutil.TimeStamp `xorm:"updated"` | |||||
| DeletedAt time.Time `xorm:"deleted"` | |||||
| } | |||||
| func updateScheduleCols(e Engine, record *ScheduleRecord, cols ...string) error { | |||||
| _, err := e.ID(record.ID).Cols(cols...).Update(record) | |||||
| return err | |||||
| } | |||||
| func UpdateScheduleCols(record *ScheduleRecord, cols ...string) error { | |||||
| return updateScheduleCols(x, record, cols...) | |||||
| } | |||||
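| // GetSchedulingRecord returns up to 100 records that are still in the processing state. | |||||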
| func GetSchedulingRecord() ([]*ScheduleRecord, error) { | |||||
| records := make([]*ScheduleRecord, 0, 10) | |||||
| return records, x. | |||||
| Where("status = ?", StorageScheduleProcessing). | |||||
| Limit(100). | |||||
| Find(&records) | |||||
| } | |||||
| func InsertScheduleRecord(record *ScheduleRecord) (_ *ScheduleRecord, err error) { | |||||
| if _, err := x.Insert(record); err != nil { | |||||
| return nil, err | |||||
| } | |||||
| return record, nil | |||||
| } | |||||
| func getScheduleRecordByCloudbrainID(e Engine, cloudbrainId int64) (*ScheduleRecord, error) { | |||||
| record := new(ScheduleRecord) | |||||
| has, err := e.Where("cloudbrain_id = ?", cloudbrainId).Get(record) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } else if !has { | |||||
| return nil, fmt.Errorf("get record by cloudbrain_id failed(%d)", cloudbrainId) | |||||
| } | |||||
| return record, nil | |||||
| } | |||||
| func GetScheduleRecordByCloudbrainID(cloudbrainId int64) (*ScheduleRecord, error) { | |||||
| return getScheduleRecordByCloudbrainID(x, cloudbrainId) | |||||
| } | |||||
| @@ -311,6 +311,7 @@ func ToOrganization(org *models.User) *api.Organization { | |||||
| Location: org.Location, | Location: org.Location, | ||||
| Visibility: org.Visibility.String(), | Visibility: org.Visibility.String(), | ||||
| RepoAdminChangeTeamAccess: org.RepoAdminChangeTeamAccess, | RepoAdminChangeTeamAccess: org.RepoAdminChangeTeamAccess, | ||||
| NumRepos: org.NumRepos, | |||||
| } | } | ||||
| } | } | ||||
| @@ -5,6 +5,7 @@ | |||||
| package cron | package cron | ||||
| import ( | import ( | ||||
| "code.gitea.io/gitea/modules/urfs_client/urchin" | |||||
| "context" | "context" | ||||
| "time" | "time" | ||||
| @@ -222,6 +223,17 @@ func registerSyncCloudbrainStatus() { | |||||
| }) | }) | ||||
| } | } | ||||
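| // registerHandleScheduleRecord registers a cron task that processes pending storage schedule records every minute. | |||||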
| func registerHandleScheduleRecord() { | |||||
| RegisterTaskFatal("handle_schedule_record", &BaseConfig{ | |||||
| Enabled: true, | |||||
| RunAtStart: false, | |||||
| Schedule: "@every 1m", | |||||
| }, func(ctx context.Context, _ *models.User, _ Config) error { | |||||
| return urchin.HandleScheduleRecords() | |||||
| }) | |||||
| } | |||||
| func registerRewardPeriodTask() { | func registerRewardPeriodTask() { | ||||
| RegisterTaskFatal("reward_period_task", &BaseConfig{ | RegisterTaskFatal("reward_period_task", &BaseConfig{ | ||||
| Enabled: true, | Enabled: true, | ||||
| @@ -266,6 +278,17 @@ func registerSyncModelArtsTempJobs() { | |||||
| }) | }) | ||||
| } | } | ||||
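| // registerHandleCloudbrainDurationStatistic registers an hourly cron task that aggregates cloudbrain duration statistics. | |||||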
| func registerHandleCloudbrainDurationStatistic() { | |||||
| RegisterTaskFatal("handle_cloudbrain_duration_statistic", &BaseConfig{ | |||||
| Enabled: true, | |||||
| RunAtStart: false, | |||||
| Schedule: "1 0 * * * ?", | |||||
| }, func(ctx context.Context, _ *models.User, _ Config) error { | |||||
| repo.CloudbrainDurationStatisticHour() | |||||
| return nil | |||||
| }) | |||||
| } | |||||
| func initBasicTasks() { | func initBasicTasks() { | ||||
| registerUpdateMirrorTask() | registerUpdateMirrorTask() | ||||
| registerRepoHealthCheck() | registerRepoHealthCheck() | ||||
| @@ -293,4 +316,7 @@ func initBasicTasks() { | |||||
| registerCloudbrainPointDeductTask() | registerCloudbrainPointDeductTask() | ||||
| registerHandleModelSafetyTask() | registerHandleModelSafetyTask() | ||||
| registerHandleScheduleRecord() | |||||
| registerHandleCloudbrainDurationStatistic() | |||||
| } | } | ||||
| @@ -1,16 +1,15 @@ | |||||
| package grampus | package grampus | ||||
| import ( | import ( | ||||
| "code.gitea.io/gitea/modules/cloudbrain" | |||||
| "encoding/json" | "encoding/json" | ||||
| "strings" | "strings" | ||||
| "code.gitea.io/gitea/modules/setting" | |||||
| "code.gitea.io/gitea/models" | "code.gitea.io/gitea/models" | ||||
| "code.gitea.io/gitea/modules/cloudbrain" | |||||
| "code.gitea.io/gitea/modules/context" | "code.gitea.io/gitea/modules/context" | ||||
| "code.gitea.io/gitea/modules/log" | "code.gitea.io/gitea/modules/log" | ||||
| "code.gitea.io/gitea/modules/notification" | "code.gitea.io/gitea/modules/notification" | ||||
| "code.gitea.io/gitea/modules/setting" | |||||
| "code.gitea.io/gitea/modules/timeutil" | "code.gitea.io/gitea/modules/timeutil" | ||||
| ) | ) | ||||
| @@ -20,10 +19,15 @@ const ( | |||||
| ProcessorTypeNPU = "npu.huawei.com/NPU" | ProcessorTypeNPU = "npu.huawei.com/NPU" | ||||
| ProcessorTypeGPU = "nvidia.com/gpu" | ProcessorTypeGPU = "nvidia.com/gpu" | ||||
| GpuWorkDir = "/tmp/" | |||||
| NpuWorkDir = "/cache/" | |||||
| GpuWorkDir = "/tmp/" | |||||
| NpuWorkDir = "/cache/" | |||||
| NpuLocalLogUrl = "/tmp/train.log" | |||||
| CommandPrepareScriptNpu = ";mkdir -p output;mkdir -p code;mkdir -p dataset;mkdir -p pretrainmodel;" | |||||
| CodeArchiveName = "master.zip" | CodeArchiveName = "master.zip" | ||||
| BucketRemote = "grampus" | |||||
| RemoteModelPath = "/output/" + models.ModelSuffix | |||||
| ) | ) | ||||
| var ( | var ( | ||||
| @@ -33,7 +37,7 @@ var ( | |||||
| SpecialPools *models.SpecialPools | SpecialPools *models.SpecialPools | ||||
| CommandPrepareScript = ";mkdir -p output;mkdir -p code;mkdir -p dataset;mkdir -p pretrainmodel;echo \"start loading script\";wget -q https://git.openi.org.cn/OpenIOSSG/%s/archive/master.zip;" + | |||||
| CommandPrepareScriptGpu = ";mkdir -p output;mkdir -p code;mkdir -p dataset;mkdir -p pretrainmodel;echo \"start loading script\";wget -q https://git.openi.org.cn/OpenIOSSG/%s/archive/master.zip;" + | |||||
| "echo \"finish loading script\";unzip -q master.zip;cd %s;chmod 777 downloader_for_obs uploader_for_npu downloader_for_minio uploader_for_gpu;" | "echo \"finish loading script\";unzip -q master.zip;cd %s;chmod 777 downloader_for_obs uploader_for_npu downloader_for_minio uploader_for_gpu;" | ||||
| ) | ) | ||||
| @@ -273,3 +277,35 @@ func InitSpecialPool() { | |||||
| json.Unmarshal([]byte(setting.Grampus.SpecialPools), &SpecialPools) | json.Unmarshal([]byte(setting.Grampus.SpecialPools), &SpecialPools) | ||||
| } | } | ||||
| } | } | ||||
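| // GetNpuModelRemoteObsUrl builds the s3 URL of a job's NPU model output in the remote grampus bucket. | |||||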
| func GetNpuModelRemoteObsUrl(jobName string) string { | |||||
| return "s3:///" + BucketRemote + "/" + GetNpuModelObjectKey(jobName) | |||||
| } | |||||
| func GetNpuModelObjectKey(jobName string) string { | |||||
| return setting.CodePathPrefix + jobName + RemoteModelPath | |||||
| } | |||||
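| // GetRemoteEndPoint returns the storage endpoint configured for the given AI center, or an empty string if it is unknown. | |||||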
| func GetRemoteEndPoint(aiCenterID string) string { | |||||
| var endPoint string | |||||
| for _, info := range setting.CenterInfos.Info { | |||||
| if info.CenterID == aiCenterID { | |||||
| endPoint = info.Endpoint | |||||
| break | |||||
| } | |||||
| } | |||||
| return endPoint | |||||
| } | |||||
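| // GetCenterProxy returns the storage proxy server configured for the given AI center, or an empty string if it is unknown. | |||||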
| func GetCenterProxy(aiCenterID string) string { | |||||
| var proxy string | |||||
| for _, info := range setting.CenterInfos.Info { | |||||
| if info.CenterID == aiCenterID { | |||||
| proxy = info.StorageProxyServer | |||||
| break | |||||
| } | |||||
| } | |||||
| return proxy | |||||
| } | |||||
| @@ -143,6 +143,8 @@ type GenerateInferenceJobReq struct { | |||||
| Spec *models.Specification | Spec *models.Specification | ||||
| DatasetName string | DatasetName string | ||||
| JobType string | JobType string | ||||
| UserImageUrl string | |||||
| UserCommand string | |||||
| } | } | ||||
| type VersionInfo struct { | type VersionInfo struct { | ||||
| @@ -682,26 +684,51 @@ func GetOutputPathByCount(TotalVersionCount int) (VersionOutputPath string) { | |||||
| func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (jobId string, err error) { | func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (jobId string, err error) { | ||||
| createTime := timeutil.TimeStampNow() | createTime := timeutil.TimeStampNow() | ||||
| jobResult, err := createInferenceJob(models.CreateInferenceJobParams{ | |||||
| JobName: req.JobName, | |||||
| Description: req.Description, | |||||
| InfConfig: models.InfConfig{ | |||||
| WorkServerNum: req.WorkServerNumber, | |||||
| AppUrl: req.CodeObsPath, | |||||
| BootFileUrl: req.BootFileUrl, | |||||
| DataUrl: req.DataUrl, | |||||
| EngineID: req.EngineID, | |||||
| // TrainUrl: req.TrainUrl, | |||||
| LogUrl: req.LogUrl, | |||||
| PoolID: req.PoolID, | |||||
| CreateVersion: true, | |||||
| Flavor: models.Flavor{ | |||||
| Code: req.Spec.SourceSpecId, | |||||
| var jobResult *models.CreateTrainJobResult | |||||
| var createErr error | |||||
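| // EngineID < 0 indicates a custom user image: submit via the user-image API with the image URL and command. | |||||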
| if req.EngineID < 0 { | |||||
| jobResult, createErr = createInferenceJobUserImage(models.CreateInfUserImageParams{ | |||||
| JobName: req.JobName, | |||||
| Description: req.Description, | |||||
| Config: models.InfUserImageConfig{ | |||||
| WorkServerNum: req.WorkServerNumber, | |||||
| AppUrl: req.CodeObsPath, | |||||
| BootFileUrl: req.BootFileUrl, | |||||
| DataUrl: req.DataUrl, | |||||
| // TrainUrl: req.TrainUrl, | |||||
| LogUrl: req.LogUrl, | |||||
| PoolID: req.PoolID, | |||||
| CreateVersion: true, | |||||
| Flavor: models.Flavor{ | |||||
| Code: req.Spec.SourceSpecId, | |||||
| }, | |||||
| Parameter: req.Parameters, | |||||
| UserImageUrl: req.UserImageUrl, | |||||
| UserCommand: req.UserCommand, | |||||
| }, | }, | ||||
| Parameter: req.Parameters, | |||||
| }, | |||||
| }) | |||||
| if err != nil { | |||||
| }) | |||||
| } else { | |||||
| jobResult, createErr = createInferenceJob(models.CreateInferenceJobParams{ | |||||
| JobName: req.JobName, | |||||
| Description: req.Description, | |||||
| InfConfig: models.InfConfig{ | |||||
| WorkServerNum: req.WorkServerNumber, | |||||
| AppUrl: req.CodeObsPath, | |||||
| BootFileUrl: req.BootFileUrl, | |||||
| DataUrl: req.DataUrl, | |||||
| EngineID: req.EngineID, | |||||
| // TrainUrl: req.TrainUrl, | |||||
| LogUrl: req.LogUrl, | |||||
| PoolID: req.PoolID, | |||||
| CreateVersion: true, | |||||
| Flavor: models.Flavor{ | |||||
| Code: req.Spec.SourceSpecId, | |||||
| }, | |||||
| Parameter: req.Parameters, | |||||
| }, | |||||
| }) | |||||
| } | |||||
| if err = createErr; err != nil { | |||||
| log.Error("createInferenceJob failed: %v", err.Error()) | log.Error("createInferenceJob failed: %v", err.Error()) | ||||
| if strings.HasPrefix(err.Error(), UnknownErrorPrefix) { | if strings.HasPrefix(err.Error(), UnknownErrorPrefix) { | ||||
| log.Info("(%s)unknown error, set temp status", req.DisplayJobName) | log.Info("(%s)unknown error, set temp status", req.DisplayJobName) | ||||
| @@ -1197,6 +1197,66 @@ sendjob: | |||||
| return &result, nil | return &result, nil | ||||
| } | } | ||||
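| // createInferenceJobUserImage submits an inference job that runs a user-supplied image and command through the ModelArts train-job API. | |||||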
| func createInferenceJobUserImage(createJobParams models.CreateInfUserImageParams) (*models.CreateTrainJobResult, error) { | |||||
| checkSetting() | |||||
| client := getRestyClient() | |||||
| var result models.CreateTrainJobResult | |||||
| retry := 0 | |||||
| sendjob: | |||||
| res, err := client.R(). | |||||
| SetHeader("Content-Type", "application/json"). | |||||
| SetAuthToken(TOKEN). | |||||
| SetBody(createJobParams). | |||||
| SetResult(&result). | |||||
| Post(HOST + "/v1/" + setting.ProjectID + urlTrainJob) | |||||
| if err != nil { | |||||
| return nil, fmt.Errorf("resty create train-job: %s", err) | |||||
| } | |||||
| req, _ := json.Marshal(createJobParams) | |||||
| log.Info("%s", req) | |||||
| if res.StatusCode() == http.StatusUnauthorized && retry < 1 { | |||||
| retry++ | |||||
| _ = getToken() | |||||
| goto sendjob | |||||
| } | |||||
| if res.StatusCode() != http.StatusOK { | |||||
| var temp models.ErrorResult | |||||
| if err = json.Unmarshal([]byte(res.String()), &temp); err != nil { | |||||
| log.Error("json.Unmarshal failed(%s): %v", res.String(), err.Error()) | |||||
| return &result, fmt.Errorf("json.Unmarshal failed(%s): %v", res.String(), err.Error()) | |||||
| } | |||||
| log.Error("createInferenceJobUserImage failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) | |||||
| bootFileErrorMsg := "Invalid OBS path '" + createJobParams.Config.BootFileUrl + "'." | |||||
| dataSetErrorMsg := "Invalid OBS path '" + createJobParams.Config.DataUrl + "'." | |||||
| if temp.ErrorMsg == bootFileErrorMsg { | |||||
| log.Error("启动文件错误!createInferenceJobUserImage failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) | |||||
| return &result, fmt.Errorf("启动文件错误!") | |||||
| } | |||||
| if temp.ErrorMsg == dataSetErrorMsg { | |||||
| log.Error("数据集错误!createInferenceJobUserImage failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) | |||||
| return &result, fmt.Errorf("数据集错误!") | |||||
| } | |||||
| if res.StatusCode() == http.StatusBadGateway { | |||||
| return &result, fmt.Errorf(UnknownErrorPrefix+"createInferenceJobUserImage failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) | |||||
| } else { | |||||
| return &result, fmt.Errorf("createInferenceJobUserImage failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) | |||||
| } | |||||
| } | |||||
| if !result.IsSuccess { | |||||
| log.Error("createInferenceJobUserImage failed(%s): %s", result.ErrorCode, result.ErrorMsg) | |||||
| return &result, fmt.Errorf("createInferenceJobUserImage failed(%s): %s", result.ErrorCode, result.ErrorMsg) | |||||
| } | |||||
| return &result, nil | |||||
| } | |||||
| func createNotebook2(createJobParams models.CreateNotebook2Params) (*models.CreateNotebookResult, error) { | func createNotebook2(createJobParams models.CreateNotebook2Params) (*models.CreateNotebookResult, error) { | ||||
| checkSetting() | checkSetting() | ||||
| client := getRestyClient() | client := getRestyClient() | ||||
| @@ -76,6 +76,17 @@ type C2NetSqInfos struct { | |||||
| C2NetSqInfo []*C2NetSequenceInfo `json:"sequence"` | C2NetSqInfo []*C2NetSequenceInfo `json:"sequence"` | ||||
| } | } | ||||
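| // AiCenterInfo describes one AI center configured in the grampus AI_CENTER_INFO setting. | |||||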
| type AiCenterInfo struct { | |||||
| CenterID string `json:"center_id"` | |||||
| Name string `json:"name"` | |||||
| Endpoint string `json:"endpoint"` | |||||
| StorageProxyServer string `json:"storage_proxy_server"` | |||||
| } | |||||
| type AiCenterInfos struct { | |||||
| Info []*AiCenterInfo `json:"infos"` | |||||
| } | |||||
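| // A hypothetical AI_CENTER_INFO value in app.ini could look like the following (field names follow the json tags above; all values are examples only): | |||||
| // {"infos":[{"center_id":"cloudbrain2","name":"example_center","endpoint":"https://obs.example.com","storage_proxy_server":"10.0.0.1"}]} | |||||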
| type StFlavorInfos struct { | type StFlavorInfos struct { | ||||
| FlavorInfo []*FlavorInfo `json:"flavor_info"` | FlavorInfo []*FlavorInfo `json:"flavor_info"` | ||||
| } | } | ||||
| @@ -594,9 +605,13 @@ var ( | |||||
| SpecialPools string | SpecialPools string | ||||
| C2NetSequence string | C2NetSequence string | ||||
| SyncScriptProject string | SyncScriptProject string | ||||
| LocalCenterID string | |||||
| AiCenterInfo string | |||||
| }{} | }{} | ||||
| C2NetInfos *C2NetSqInfos | |||||
| C2NetInfos *C2NetSqInfos | |||||
| CenterInfos *AiCenterInfos | |||||
| C2NetMapInfo map[string]*C2NetSequenceInfo | |||||
| //elk config | //elk config | ||||
| ElkUrl string | ElkUrl string | ||||
| @@ -701,8 +716,12 @@ var ( | |||||
| GPU_PYTORCH_IMAGE string | GPU_PYTORCH_IMAGE string | ||||
| GpuQueue string | GpuQueue string | ||||
| GPU_TENSORFLOW_IMAGE string | GPU_TENSORFLOW_IMAGE string | ||||
| GPU_PADDLE_IMAGE string | |||||
| GPU_MXNET_IMAGE string | |||||
| NPU_MINDSPORE_16_IMAGE string | NPU_MINDSPORE_16_IMAGE string | ||||
| PytorchOnnxBootFile string | PytorchOnnxBootFile string | ||||
| PaddleOnnxBootFile string | |||||
| MXnetOnnxBootFile string | |||||
| PytorchTrTBootFile string | PytorchTrTBootFile string | ||||
| MindsporeBootFile string | MindsporeBootFile string | ||||
| TensorFlowNpuBootFile string | TensorFlowNpuBootFile string | ||||
| @@ -1596,6 +1615,10 @@ func getModelConvertConfig() { | |||||
| ModelConvert.NPU_PoolID = sec.Key("NPU_PoolID").MustString("pool7908321a") | ModelConvert.NPU_PoolID = sec.Key("NPU_PoolID").MustString("pool7908321a") | ||||
| ModelConvert.NPU_MINDSPORE_IMAGE_ID = sec.Key("NPU_MINDSPORE_IMAGE_ID").MustInt(121) | ModelConvert.NPU_MINDSPORE_IMAGE_ID = sec.Key("NPU_MINDSPORE_IMAGE_ID").MustInt(121) | ||||
| ModelConvert.NPU_TENSORFLOW_IMAGE_ID = sec.Key("NPU_TENSORFLOW_IMAGE_ID").MustInt(35) | ModelConvert.NPU_TENSORFLOW_IMAGE_ID = sec.Key("NPU_TENSORFLOW_IMAGE_ID").MustInt(35) | ||||
| ModelConvert.GPU_PADDLE_IMAGE = sec.Key("GPU_PADDLE_IMAGE").MustString("dockerhub.pcl.ac.cn:5000/user-images/openi:paddle2.3.0_gpu_cuda11.2_cudnn8") | |||||
| ModelConvert.GPU_MXNET_IMAGE = sec.Key("GPU_MXNET_IMAGE").MustString("dockerhub.pcl.ac.cn:5000/user-images/openi:mxnet191cu_cuda102_py37") | |||||
| ModelConvert.PaddleOnnxBootFile = sec.Key("PaddleOnnxBootFile").MustString("convert_paddle.py") | |||||
| ModelConvert.MXnetOnnxBootFile = sec.Key("MXnetOnnxBootFile").MustString("convert_mxnet.py") | |||||
| } | } | ||||
| func getModelAppConfig() { | func getModelAppConfig() { | ||||
| @@ -1632,8 +1655,19 @@ func getGrampusConfig() { | |||||
| if err := json.Unmarshal([]byte(Grampus.C2NetSequence), &C2NetInfos); err != nil { | if err := json.Unmarshal([]byte(Grampus.C2NetSequence), &C2NetInfos); err != nil { | ||||
| log.Error("Unmarshal(C2NetSequence) failed:%v", err) | log.Error("Unmarshal(C2NetSequence) failed:%v", err) | ||||
| } | } | ||||
| C2NetMapInfo = make(map[string]*C2NetSequenceInfo) | |||||
| for _, value := range C2NetInfos.C2NetSqInfo { | |||||
| C2NetMapInfo[value.Name] = value | |||||
| } | |||||
| } | } | ||||
| Grampus.SyncScriptProject = sec.Key("SYNC_SCRIPT_PROJECT").MustString("script_for_grampus") | Grampus.SyncScriptProject = sec.Key("SYNC_SCRIPT_PROJECT").MustString("script_for_grampus") | ||||
| Grampus.LocalCenterID = sec.Key("LOCAL_CENTER_ID").MustString("cloudbrain2") | |||||
| Grampus.AiCenterInfo = sec.Key("AI_CENTER_INFO").MustString("") | |||||
| if Grampus.AiCenterInfo != "" { | |||||
| if err := json.Unmarshal([]byte(Grampus.AiCenterInfo), &CenterInfos); err != nil { | |||||
| log.Error("Unmarshal(AiCenterInfo) failed:%v", err) | |||||
| } | |||||
| } | |||||
| } | } | ||||
| @@ -470,47 +470,43 @@ func GetObsListObject(jobName, outPutPath, parentDir, versionName string) ([]Fil | |||||
| input := &obs.ListObjectsInput{} | input := &obs.ListObjectsInput{} | ||||
| input.Bucket = setting.Bucket | input.Bucket = setting.Bucket | ||||
| input.Prefix = strings.TrimPrefix(path.Join(setting.TrainJobModelPath, jobName, outPutPath, versionName, parentDir), "/") | input.Prefix = strings.TrimPrefix(path.Join(setting.TrainJobModelPath, jobName, outPutPath, versionName, parentDir), "/") | ||||
| log.Info("bucket=" + input.Bucket + " Prefix=" + input.Prefix) | |||||
| strPrefix := strings.Split(input.Prefix, "/") | |||||
| if !strings.HasSuffix(input.Prefix, "/") { | |||||
| input.Prefix += "/" | |||||
| } | |||||
| output, err := ObsCli.ListObjects(input) | output, err := ObsCli.ListObjects(input) | ||||
| fileInfos := make([]FileInfo, 0) | fileInfos := make([]FileInfo, 0) | ||||
| prefixLen := len(input.Prefix) | |||||
| fileMap := make(map[string]bool) | |||||
| if err == nil { | if err == nil { | ||||
| for _, val := range output.Contents { | for _, val := range output.Contents { | ||||
| str1 := strings.Split(val.Key, "/") | |||||
| log.Info("val key=" + val.Key) | |||||
| var isDir bool | var isDir bool | ||||
| var fileName, nextParentDir string | |||||
| if strings.HasSuffix(val.Key, "/") { | |||||
| //dirs in next level dir | |||||
| if len(str1)-len(strPrefix) > 2 { | |||||
| continue | |||||
| } | |||||
| fileName = str1[len(str1)-2] | |||||
| var fileName string | |||||
| if val.Key == input.Prefix { | |||||
| continue | |||||
| } | |||||
| fileName = val.Key[prefixLen:] | |||||
| log.Info("fileName =" + fileName) | |||||
| files := strings.Split(fileName, "/") | |||||
| if fileMap[files[0]] { | |||||
| continue | |||||
| } else { | |||||
| fileMap[files[0]] = true | |||||
| } | |||||
| ParenDir := parentDir | |||||
| fileName = files[0] | |||||
| if len(files) > 1 { | |||||
| isDir = true | isDir = true | ||||
| if parentDir == "" { | |||||
| nextParentDir = fileName | |||||
| } else { | |||||
| nextParentDir = parentDir + "/" + fileName | |||||
| } | |||||
| if fileName == strPrefix[len(strPrefix)-1] || (fileName+"/") == outPutPath { | |||||
| continue | |||||
| } | |||||
| ParenDir += fileName + "/" | |||||
| } else { | } else { | ||||
| //files in next level dir | |||||
| if len(str1)-len(strPrefix) > 1 { | |||||
| continue | |||||
| } | |||||
| fileName = str1[len(str1)-1] | |||||
| isDir = false | isDir = false | ||||
| nextParentDir = parentDir | |||||
| } | } | ||||
| fileInfo := FileInfo{ | fileInfo := FileInfo{ | ||||
| ModTime: val.LastModified.Local().Format("2006-01-02 15:04:05"), | ModTime: val.LastModified.Local().Format("2006-01-02 15:04:05"), | ||||
| FileName: fileName, | FileName: fileName, | ||||
| Size: val.Size, | Size: val.Size, | ||||
| IsDir: isDir, | IsDir: isDir, | ||||
| ParenDir: nextParentDir, | |||||
| ParenDir: ParenDir, | |||||
| } | } | ||||
| fileInfos = append(fileInfos, fileInfo) | fileInfos = append(fileInfos, fileInfo) | ||||
| } | } | ||||
| @@ -15,6 +15,7 @@ type Organization struct { | |||||
| Location string `json:"location"` | Location string `json:"location"` | ||||
| Visibility string `json:"visibility"` | Visibility string `json:"visibility"` | ||||
| RepoAdminChangeTeamAccess bool `json:"repo_admin_change_team_access"` | RepoAdminChangeTeamAccess bool `json:"repo_admin_change_team_access"` | ||||
| NumRepos int `json:"num_repos"` | |||||
| } | } | ||||
| // CreateOrgOption options for creating an organization | // CreateOrgOption options for creating an organization | ||||
| @@ -90,6 +90,7 @@ type Repository struct { | |||||
| AllowRebaseMerge bool `json:"allow_rebase_explicit"` | AllowRebaseMerge bool `json:"allow_rebase_explicit"` | ||||
| AllowSquash bool `json:"allow_squash_merge"` | AllowSquash bool `json:"allow_squash_merge"` | ||||
| AvatarURL string `json:"avatar_url"` | AvatarURL string `json:"avatar_url"` | ||||
| Status int `json:"status"` | |||||
| } | } | ||||
| // CreateRepoOption options when creating repository | // CreateRepoOption options when creating repository | ||||
| @@ -0,0 +1,93 @@ | |||||
| /* | |||||
| * Copyright 2020 The Dragonfly Authors | |||||
| * | |||||
| * Licensed under the Apache License, Version 2.0 (the "License"); | |||||
| * you may not use this file except in compliance with the License. | |||||
| * You may obtain a copy of the License at | |||||
| * | |||||
| * http://www.apache.org/licenses/LICENSE-2.0 | |||||
| * | |||||
| * Unless required by applicable law or agreed to in writing, software | |||||
| * distributed under the License is distributed on an "AS IS" BASIS, | |||||
| * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
| * See the License for the specific language governing permissions and | |||||
| * limitations under the License. | |||||
| */ | |||||
| package config | |||||
| import ( | |||||
| "time" | |||||
| ) | |||||
| // Reason of backing to source. | |||||
| const ( | |||||
| BackSourceReasonNone = 0 | |||||
| BackSourceReasonRegisterFail = 1 | |||||
| BackSourceReasonMd5NotMatch = 2 | |||||
| BackSourceReasonDownloadError = 3 | |||||
| BackSourceReasonNoSpace = 4 | |||||
| BackSourceReasonInitError = 5 | |||||
| BackSourceReasonWriteError = 6 | |||||
| BackSourceReasonHostSysError = 7 | |||||
| BackSourceReasonNodeEmpty = 8 | |||||
| BackSourceReasonSourceError = 10 | |||||
| BackSourceReasonUserSpecified = 100 | |||||
| ForceNotBackSourceAddition = 1000 | |||||
| ) | |||||
| // Download pattern. | |||||
| const ( | |||||
| PatternP2P = "p2p" | |||||
| PatternSeedPeer = "seed-peer" | |||||
| PatternSource = "source" | |||||
| ) | |||||
| //// Download limit. | |||||
| //const ( | |||||
| // DefaultPerPeerDownloadLimit = 20 * unit.MB | |||||
| // DefaultTotalDownloadLimit = 100 * unit.MB | |||||
| // DefaultUploadLimit = 100 * unit.MB | |||||
| // DefaultMinRate = 20 * unit.MB | |||||
| //) | |||||
| // Others. | |||||
| const ( | |||||
| DefaultTimestampFormat = "2006-01-02 15:04:05" | |||||
| SchemaHTTP = "http" | |||||
| DefaultTaskExpireTime = 6 * time.Hour | |||||
| DefaultGCInterval = 1 * time.Minute | |||||
| DefaultDaemonAliveTime = 5 * time.Minute | |||||
| DefaultScheduleTimeout = 5 * time.Minute | |||||
| DefaultDownloadTimeout = 5 * time.Minute | |||||
| DefaultSchedulerSchema = "http" | |||||
| DefaultSchedulerIP = "127.0.0.1" | |||||
| DefaultSchedulerPort = 8002 | |||||
| DefaultPieceChanSize = 16 | |||||
| DefaultObjectMaxReplicas = 3 | |||||
| ) | |||||
| // Dfcache subcommand names. | |||||
| const ( | |||||
| CmdStat = "stat" | |||||
| CmdImport = "import" | |||||
| CmdExport = "export" | |||||
| CmdDelete = "delete" | |||||
| ) | |||||
| // Default ports of listening services. | |||||
| const ( | |||||
| DefaultEndPort = 65535 | |||||
| DefaultPeerStartPort = 65000 | |||||
| DefaultUploadStartPort = 65002 | |||||
| DefaultObjectStorageStartPort = 65004 | |||||
| DefaultHealthyStartPort = 40901 | |||||
| ) | |||||
| var ( | |||||
| // DefaultCertValidityPeriod is default validity period of certificate. | |||||
| DefaultCertValidityPeriod = 180 * 24 * time.Hour | |||||
| ) | |||||
| @@ -0,0 +1,66 @@ | |||||
| /* | |||||
| * Copyright 2022 The Dragonfly Authors | |||||
| * | |||||
| * Licensed under the Apache License, Version 2.0 (the "License"); | |||||
| * you may not use this file except in compliance with the License. | |||||
| * You may obtain a copy of the License at | |||||
| * | |||||
| * http://www.apache.org/licenses/LICENSE-2.0 | |||||
| * | |||||
| * Unless required by applicable law or agreed to in writing, software | |||||
| * distributed under the License is distributed on an "AS IS" BASIS, | |||||
| * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
| * See the License for the specific language governing permissions and | |||||
| * limitations under the License. | |||||
| */ | |||||
| package config | |||||
| import ( | |||||
| "errors" | |||||
| "fmt" | |||||
| "net/url" | |||||
| ) | |||||
| type DfstoreConfig struct { | |||||
| // Address of the object storage service. | |||||
| Endpoint string `yaml:"endpoint,omitempty" mapstructure:"endpoint,omitempty"` | |||||
| // Filter is used to generate a unique Task ID by | |||||
| // filtering unnecessary query params in the URL, | |||||
| // it is separated by & character. | |||||
| Filter string `yaml:"filter,omitempty" mapstructure:"filter,omitempty"` | |||||
| // Mode is the mode in which the backend is written, | |||||
| // including WriteBack and AsyncWriteBack. | |||||
| Mode int `yaml:"mode,omitempty" mapstructure:"mode,omitempty"` | |||||
| // MaxReplicas is the maximum number of | |||||
| // replicas of an object cache in seed peers. | |||||
| MaxReplicas int `yaml:"maxReplicas,omitempty" mapstructure:"maxReplicas,omitempty"` | |||||
| } | |||||
| // New dfstore configuration. | |||||
| func NewDfstore() *DfstoreConfig { | |||||
| url := url.URL{ | |||||
| Scheme: "http", | |||||
| Host: fmt.Sprintf("%s:%d", "127.0.0.1", DefaultObjectStorageStartPort), | |||||
| } | |||||
| return &DfstoreConfig{ | |||||
| Endpoint: url.String(), | |||||
| MaxReplicas: DefaultObjectMaxReplicas, | |||||
| } | |||||
| } | |||||
| func (cfg *DfstoreConfig) Validate() error { | |||||
| if cfg.Endpoint == "" { | |||||
| return errors.New("dfstore requires parameter endpoint") | |||||
| } | |||||
| if _, err := url.ParseRequestURI(cfg.Endpoint); err != nil { | |||||
| return fmt.Errorf("invalid endpoint: %w", err) | |||||
| } | |||||
| return nil | |||||
| } | |||||
| @@ -0,0 +1,32 @@ | |||||
| /* | |||||
| * Copyright 2020 The Dragonfly Authors | |||||
| * | |||||
| * Licensed under the Apache License, Version 2.0 (the "License"); | |||||
| * you may not use this file except in compliance with the License. | |||||
| * You may obtain a copy of the License at | |||||
| * | |||||
| * http://www.apache.org/licenses/LICENSE-2.0 | |||||
| * | |||||
| * Unless required by applicable law or agreed to in writing, software | |||||
| * distributed under the License is distributed on an "AS IS" BASIS, | |||||
| * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
| * See the License for the specific language governing permissions and | |||||
| * limitations under the License. | |||||
| */ | |||||
| package config | |||||
| const ( | |||||
| HeaderDragonflyFilter = "X-Dragonfly-Filter" | |||||
| HeaderDragonflyPeer = "X-Dragonfly-Peer" | |||||
| HeaderDragonflyTask = "X-Dragonfly-Task" | |||||
| HeaderDragonflyRange = "X-Dragonfly-Range" | |||||
| // HeaderDragonflyTag different HeaderDragonflyTag for the same url will be divided into different P2P overlay | |||||
| HeaderDragonflyTag = "X-Dragonfly-Tag" | |||||
| // HeaderDragonflyApplication is used for statistics and traffic control | |||||
| HeaderDragonflyApplication = "X-Dragonfly-Application" | |||||
| // HeaderDragonflyRegistry is used for dynamic registry mirrors. | |||||
| HeaderDragonflyRegistry = "X-Dragonfly-Registry" | |||||
| // HeaderDragonflyObjectMetaDigest is used for digest of object storage. | |||||
| HeaderDragonflyObjectMetaDigest = "X-Dragonfly-Object-Meta-Digest" | |||||
| ) | |||||
| @@ -0,0 +1,307 @@ | |||||
| package dfstore | |||||
| import ( | |||||
| "context" | |||||
| "errors" | |||||
| "fmt" | |||||
| "github.com/go-http-utils/headers" | |||||
| "io" | |||||
| "net/http" | |||||
| "net/url" | |||||
| "path" | |||||
| "strconv" | |||||
| "code.gitea.io/gitea/modules/urfs_client/config" | |||||
| pkgobjectstorage "code.gitea.io/gitea/modules/urfs_client/objectstorage" | |||||
| ) | |||||
| // Dfstore is the interface used for object storage. | |||||
| type Dfstore interface { | |||||
| // GetUrfsMetadataRequestWithContext returns *http.Request of getting Urfs metadata. | |||||
| GetUrfsMetadataRequestWithContext(ctx context.Context, input *GetUrfsMetadataInput) (*http.Request, error) | |||||
| // GetUrfsMetadataWithContext returns metadata of Urfs. | |||||
| GetUrfsMetadataWithContext(ctx context.Context, input *GetUrfsMetadataInput) (*pkgobjectstorage.ObjectMetadata, error) | |||||
| // GetUrfsRequestWithContext returns *http.Request of getting Urfs. | |||||
| GetUrfsRequestWithContext(ctx context.Context, input *GetUrfsInput) (*http.Request, error) | |||||
| // GetUrfsWithContext returns data of Urfs. | |||||
| GetUrfsWithContext(ctx context.Context, input *GetUrfsInput) (io.ReadCloser, error) | |||||
| // GetUrfsStatusRequestWithContext returns *http.Request of getting Urfs status. | |||||
| GetUrfsStatusRequestWithContext(ctx context.Context, input *GetUrfsInput) (*http.Request, error) | |||||
| // GetUrfsStatusWithContext returns schedule status of Urfs. | |||||
| GetUrfsStatusWithContext(ctx context.Context, input *GetUrfsInput) (io.ReadCloser, error) | |||||
| } | |||||
| // dfstore provides object storage function. | |||||
| type dfstore struct { | |||||
| endpoint string | |||||
| httpClient *http.Client | |||||
| } | |||||
| // Option is a functional option for configuring the dfstore. | |||||
| type Option func(dfs *dfstore) | |||||
| // New dfstore instance. | |||||
| func New(endpoint string, options ...Option) Dfstore { | |||||
| dfs := &dfstore{ | |||||
| endpoint: endpoint, | |||||
| httpClient: http.DefaultClient, | |||||
| } | |||||
| for _, opt := range options { | |||||
| opt(dfs) | |||||
| } | |||||
| return dfs | |||||
| } | |||||
| // GetUrfsMetadataInput is used to construct request of getting object metadata. | |||||
| type GetUrfsMetadataInput struct { | |||||
| // Endpoint is endpoint name. | |||||
| Endpoint string | |||||
| // BucketName is bucket name. | |||||
| BucketName string | |||||
| // ObjectKey is object key. | |||||
| ObjectKey string | |||||
| // DstPeer is target peerHost. | |||||
| DstPeer string | |||||
| } | |||||
| // Validate validates GetUrfsMetadataInput fields. | |||||
| func (i *GetUrfsMetadataInput) Validate() error { | |||||
| if i.Endpoint == "" { | |||||
| return errors.New("invalid Endpoint") | |||||
| } | |||||
| if i.BucketName == "" { | |||||
| return errors.New("invalid BucketName") | |||||
| } | |||||
| if i.ObjectKey == "" { | |||||
| return errors.New("invalid ObjectKey") | |||||
| } | |||||
| return nil | |||||
| } | |||||
| // GetUrfsMetadataRequestWithContext returns *http.Request of getting Urfs metadata. | |||||
| func (dfs *dfstore) GetUrfsMetadataRequestWithContext(ctx context.Context, input *GetUrfsMetadataInput) (*http.Request, error) { | |||||
| if err := input.Validate(); err != nil { | |||||
| return nil, err | |||||
| } | |||||
| dstUrl := url.URL{ | |||||
| Scheme: "http", | |||||
| Host: fmt.Sprintf("%s:%d", input.DstPeer, config.DefaultObjectStorageStartPort), | |||||
| } | |||||
| u, err := url.Parse(dstUrl.String()) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| u.Path = path.Join("buckets", input.BucketName+"."+input.Endpoint, "objects", input.ObjectKey) | |||||
| req, err := http.NewRequestWithContext(ctx, http.MethodHead, u.String(), nil) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| return req, nil | |||||
| } | |||||
| // GetUrfsMetadataWithContext returns metadata of Urfs. | |||||
| func (dfs *dfstore) GetUrfsMetadataWithContext(ctx context.Context, input *GetUrfsMetadataInput) (*pkgobjectstorage.ObjectMetadata, error) { | |||||
| req, err := dfs.GetUrfsMetadataRequestWithContext(ctx, input) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| resp, err := dfs.httpClient.Do(req) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| defer resp.Body.Close() | |||||
| if resp.StatusCode/100 != 2 { | |||||
| return nil, fmt.Errorf("bad response status %s", resp.Status) | |||||
| } | |||||
| contentLength, err := strconv.ParseInt(resp.Header.Get(headers.ContentLength), 10, 64) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| return &pkgobjectstorage.ObjectMetadata{ | |||||
| ContentDisposition: resp.Header.Get(headers.ContentDisposition), | |||||
| ContentEncoding: resp.Header.Get(headers.ContentEncoding), | |||||
| ContentLanguage: resp.Header.Get(headers.ContentLanguage), | |||||
| ContentLength: int64(contentLength), | |||||
| ContentType: resp.Header.Get(headers.ContentType), | |||||
| ETag: resp.Header.Get(headers.ETag), | |||||
| Digest: resp.Header.Get(config.HeaderDragonflyObjectMetaDigest), | |||||
| }, nil | |||||
| } | |||||
| // GetUrfsInput is used to construct request of getting object. | |||||
| type GetUrfsInput struct { | |||||
| // Endpoint is endpoint name. | |||||
| Endpoint string | |||||
| // BucketName is bucket name. | |||||
| BucketName string | |||||
| // ObjectKey is object key. | |||||
| ObjectKey string | |||||
| // Filter is used to generate a unique Task ID by | |||||
| // filtering unnecessary query params in the URL, | |||||
| // it is separated by & character. | |||||
| Filter string | |||||
| // Range is the HTTP range header. | |||||
| Range string | |||||
| // DstPeer is target peerHost. | |||||
| DstPeer string | |||||
| } | |||||
| // GetUrfsWithContext returns data of Urfs. | |||||
| func (dfs *dfstore) GetUrfsWithContext(ctx context.Context, input *GetUrfsInput) (io.ReadCloser, error) { | |||||
| req, err := dfs.GetUrfsRequestWithContext(ctx, input) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| resp, err := dfs.httpClient.Do(req) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| if resp.StatusCode/100 != 2 { | |||||
| return nil, fmt.Errorf("bad response status %s", resp.Status) | |||||
| } | |||||
| return resp.Body, nil | |||||
| } | |||||
| // GetUrfsRequestWithContext returns *http.Request of getting Urfs. | |||||
| func (dfs *dfstore) GetUrfsRequestWithContext(ctx context.Context, input *GetUrfsInput) (*http.Request, error) { | |||||
| if err := input.Validate(); err != nil { | |||||
| return nil, err | |||||
| } | |||||
| dstUrl := url.URL{ | |||||
| Scheme: "http", | |||||
| Host: fmt.Sprintf("%s:%d", input.DstPeer, config.DefaultObjectStorageStartPort), | |||||
| } | |||||
| u, err := url.Parse(dstUrl.String()) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| u.Path = path.Join("buckets", input.BucketName+"."+input.Endpoint, "cache_object", input.ObjectKey) | |||||
| query := u.Query() | |||||
| if input.Filter != "" { | |||||
| query.Set("filter", input.Filter) | |||||
| } | |||||
| u.RawQuery = query.Encode() | |||||
| req, err := http.NewRequestWithContext(ctx, http.MethodPost, u.String(), nil) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| if input.Range != "" { | |||||
| req.Header.Set(headers.Range, input.Range) | |||||
| } | |||||
| return req, nil | |||||
| } | |||||
| // Validate validates GetUrfsInput fields. | |||||
| func (i *GetUrfsInput) Validate() error { | |||||
| if i.Endpoint == "" { | |||||
| return errors.New("invalid Endpoint") | |||||
| } | |||||
| if i.BucketName == "" { | |||||
| return errors.New("invalid BucketName") | |||||
| } | |||||
| if i.ObjectKey == "" { | |||||
| return errors.New("invalid ObjectKey") | |||||
| } | |||||
| return nil | |||||
| } | |||||
| // GetUrfsStatusWithContext returns schedule task status. | |||||
| func (dfs *dfstore) GetUrfsStatusWithContext(ctx context.Context, input *GetUrfsInput) (io.ReadCloser, error) { | |||||
| req, err := dfs.GetUrfsStatusRequestWithContext(ctx, input) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| resp, err := dfs.httpClient.Do(req) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| if resp.StatusCode/100 != 2 { | |||||
| return nil, fmt.Errorf("bad response status %s", resp.Status) | |||||
| } | |||||
| return resp.Body, nil | |||||
| } | |||||
| // GetUrfsStatusRequestWithContext returns *http.Request for checking schedule task status. | |||||
| func (dfs *dfstore) GetUrfsStatusRequestWithContext(ctx context.Context, input *GetUrfsInput) (*http.Request, error) { | |||||
| if err := input.Validate(); err != nil { | |||||
| return nil, err | |||||
| } | |||||
| dstUrl := url.URL{ | |||||
| Scheme: "http", | |||||
| Host: fmt.Sprintf("%s:%d", input.DstPeer, config.DefaultObjectStorageStartPort), | |||||
| } | |||||
| u, err := url.Parse(dstUrl.String()) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| u.Path = path.Join("buckets", input.BucketName+"."+input.Endpoint, "check_object", input.ObjectKey) | |||||
| query := u.Query() | |||||
| if input.Filter != "" { | |||||
| query.Set("filter", input.Filter) | |||||
| } | |||||
| u.RawQuery = query.Encode() | |||||
| req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| if input.Range != "" { | |||||
| req.Header.Set(headers.Range, input.Range) | |||||
| } | |||||
| return req, nil | |||||
| } | |||||
| @@ -0,0 +1,5 @@ | |||||
| // Code generated by MockGen. DO NOT EDIT. | |||||
| // Source: objectstorage.go | |||||
| // Package mocks is a generated GoMock package. | |||||
| package mocks | |||||
| @@ -0,0 +1,47 @@ | |||||
| /* | |||||
| * Copyright 2022 The Dragonfly Authors | |||||
| * | |||||
| * Licensed under the Apache License, Version 2.0 (the "License"); | |||||
| * you may not use this file except in compliance with the License. | |||||
| * You may obtain a copy of the License at | |||||
| * | |||||
| * http://www.apache.org/licenses/LICENSE-2.0 | |||||
| * | |||||
| * Unless required by applicable law or agreed to in writing, software | |||||
| * distributed under the License is distributed on an "AS IS" BASIS, | |||||
| * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
| * See the License for the specific language governing permissions and | |||||
| * limitations under the License. | |||||
| */ | |||||
| //go:generate mockgen -destination mocks/objectstorage_mock.go -source objectstorage.go -package mocks | |||||
| package objectstorage | |||||
| type ObjectMetadata struct { | |||||
| // Key is object key. | |||||
| Key string | |||||
| // ContentDisposition is Content-Disposition header. | |||||
| ContentDisposition string | |||||
| // ContentEncoding is Content-Encoding header. | |||||
| ContentEncoding string | |||||
| // ContentLanguage is Content-Language header. | |||||
| ContentLanguage string | |||||
| // ContentLength is Content-Length header. | |||||
| ContentLength int64 | |||||
| // ContentType is Content-Type header. | |||||
| ContentType string | |||||
| // ETag is ETag header. | |||||
| ETag string | |||||
| // Digest is object digest. | |||||
| Digest string | |||||
| } | |||||
| @@ -0,0 +1,112 @@ | |||||
| package urchin | |||||
| import ( | |||||
| "encoding/json" | |||||
| "fmt" | |||||
| "strings" | |||||
| "code.gitea.io/gitea/models" | |||||
| "code.gitea.io/gitea/modules/labelmsg" | |||||
| "code.gitea.io/gitea/modules/log" | |||||
| "code.gitea.io/gitea/modules/setting" | |||||
| ) | |||||
| type DecompressReq struct { | |||||
| SourceFile string `json:"source_file"` | |||||
| DestPath string `json:"dest_path"` | |||||
| } | |||||
| var urfsClient Urchinfs | |||||
| func getUrfsClient() { | |||||
| if urfsClient != nil { | |||||
| return | |||||
| } | |||||
| urfsClient = New() | |||||
| } | |||||
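| // GetBackNpuModel schedules the NPU model output of a cloudbrain job back through the urfs peer, records the transfer, and triggers decompression if the transfer has already completed. | |||||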
| func GetBackNpuModel(cloudbrainID int64, endpoint, bucket, objectKey, destPeerHost string) error { | |||||
| getUrfsClient() | |||||
| res, err := urfsClient.ScheduleDataToPeerByKey(endpoint, bucket, objectKey, destPeerHost) | |||||
| if err != nil { | |||||
| log.Error("ScheduleDataToPeerByKey failed:%v", err) | |||||
| return err | |||||
| } | |||||
| _, err = models.InsertScheduleRecord(&models.ScheduleRecord{ | |||||
| CloudbrainID: cloudbrainID, | |||||
| EndPoint: res.DataEndpoint, | |||||
| Bucket: res.DataRoot, | |||||
| ObjectKey: res.DataPath, | |||||
| ProxyServer: destPeerHost, | |||||
| Status: res.StatusCode, | |||||
| }) | |||||
| if err != nil { | |||||
| log.Error("InsertScheduleRecord failed:%v", err) | |||||
| return err | |||||
| } | |||||
| switch res.StatusCode { | |||||
| case models.StorageScheduleSucceed: | |||||
| log.Info("ScheduleDataToPeerByKey succeed") | |||||
| decompress(res.DataRoot+"/"+res.DataPath, setting.Bucket+"/"+strings.TrimSuffix(res.DataPath, models.ModelSuffix)) | |||||
| case models.StorageScheduleProcessing: | |||||
| log.Info("ScheduleDataToPeerByKey processing") | |||||
| case models.StorageScheduleFailed: | |||||
| log.Error("ScheduleDataToPeerByKey failed:%s", res.StatusMsg) | |||||
| return fmt.Errorf("GetBackNpuModel failed:%s", res.StatusMsg) | |||||
| default: | |||||
| log.Info("ScheduleDataToPeerByKey failed, unknown StatusCode:%d", res.StatusCode) | |||||
| return fmt.Errorf("GetBackNpuModel failed, unknow StatusCode:%d", res.StatusCode) | |||||
| } | |||||
| return nil | |||||
| } | |||||
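| // HandleScheduleRecords re-checks all processing schedule records, updates their status, and triggers decompression for the ones that have finished. | |||||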
| func HandleScheduleRecords() error { | |||||
| getUrfsClient() | |||||
| records, err := models.GetSchedulingRecord() | |||||
| if err != nil { | |||||
| log.Error("GetSchedulingRecord failed:%v", err) | |||||
| return err | |||||
| } | |||||
| for _, record := range records { | |||||
| res, err := urfsClient.CheckScheduleTaskStatusByKey(record.EndPoint, record.Bucket, record.ObjectKey, record.ProxyServer) | |||||
| if err != nil { | |||||
| log.Error("CheckScheduleTaskStatusByKey(%d) failed:%v", record.ID, err) | |||||
| continue | |||||
| } | |||||
| record.Status = res.StatusCode | |||||
| models.UpdateScheduleCols(record, "status") | |||||
| switch res.StatusCode { | |||||
| case models.StorageScheduleSucceed: | |||||
| log.Info("ScheduleDataToPeerByKey(%s) succeed", record.ObjectKey) | |||||
| decompress(record.Bucket+"/"+record.ObjectKey, setting.Bucket+"/"+strings.TrimSuffix(record.ObjectKey, models.ModelSuffix)) | |||||
| case models.StorageScheduleProcessing: | |||||
| log.Info("ScheduleDataToPeerByKey(%s) processing", record.ObjectKey) | |||||
| case models.StorageScheduleFailed: | |||||
| log.Error("ScheduleDataToPeerByKey(%s) failed:%s", record.ObjectKey, res.StatusMsg) | |||||
| default: | |||||
| log.Info("ScheduleDataToPeerByKey(%s) failed, unknown StatusCode:%d", record.ObjectKey, res.StatusCode) | |||||
| } | |||||
| } | |||||
| return nil | |||||
| } | |||||
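| // decompress notifies the label system to unpack the scheduled archive from sourceFile into destPath on OBS. | |||||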
| func decompress(sourceFile, destPath string) { | |||||
| req, _ := json.Marshal(DecompressReq{ | |||||
| SourceFile: sourceFile, | |||||
| DestPath: destPath, | |||||
| }) | |||||
| err := labelmsg.SendDecompressAttachToLabelOBS(string(req)) | |||||
| if err != nil { | |||||
| log.Error("SendDecompressTask to labelsystem (%s) failed:%s", sourceFile, err.Error()) | |||||
| } | |||||
| } | |||||
| @@ -0,0 +1,276 @@ | |||||
| package urchin | |||||
| import ( | |||||
| "context" | |||||
| "encoding/json" | |||||
| "errors" | |||||
| "io/ioutil" | |||||
| "net/url" | |||||
| "strconv" | |||||
| "strings" | |||||
| "code.gitea.io/gitea/modules/urfs_client/config" | |||||
| urfs "code.gitea.io/gitea/modules/urfs_client/dfstore" | |||||
| ) | |||||
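| // Urchinfs wraps the urfs client to schedule object-storage data to a target peer and query the schedule task status. | |||||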
| type Urchinfs interface { | |||||
| //// schedule source dataset to target peer | |||||
| //ScheduleDataToPeer(sourceUrl, destPeerHost string) (*PeerResult, error) | |||||
| // | |||||
| //// check schedule data to peer task status | |||||
| //CheckScheduleTaskStatus(sourceUrl, destPeerHost string) (*PeerResult, error) | |||||
| ScheduleDataToPeerByKey(endpoint, bucketName, objectKey, destPeerHost string) (*PeerResult, error) | |||||
| CheckScheduleTaskStatusByKey(endpoint, bucketName, objectKey, destPeerHost string) (*PeerResult, error) | |||||
| } | |||||
| type urchinfs struct { | |||||
| // Initialize default urfs config. | |||||
| cfg *config.DfstoreConfig | |||||
| } | |||||
| // New urchinfs instance. | |||||
| func New() Urchinfs { | |||||
| urfs := &urchinfs{ | |||||
| cfg: config.NewDfstore(), | |||||
| } | |||||
| return urfs | |||||
| } | |||||
| const ( | |||||
| // UrfsScheme is the scheme of object storage. | |||||
| UrfsScheme = "urfs" | |||||
| ) | |||||
| /* | |||||
| func (urfs *urchinfs) ScheduleDataToPeer(sourceUrl, destPeerHost string) (*PeerResult, error) { | |||||
| ctx, cancel := context.WithCancel(context.Background()) | |||||
| defer cancel() | |||||
| if err := urfs.cfg.Validate(); err != nil { | |||||
| return nil, err | |||||
| } | |||||
| if err := validateSchedulelArgs(sourceUrl, destPeerHost); err != nil { | |||||
| return nil, err | |||||
| } | |||||
| // Copy object storage to local file. | |||||
| endpoint, bucketName, objectKey, err := parseUrfsURL(sourceUrl) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| peerResult, err := processScheduleDataToPeer(ctx, urfs.cfg, endpoint, bucketName, objectKey, destPeerHost) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| return peerResult, err | |||||
| } | |||||
| */ | |||||
| func (urfs *urchinfs) ScheduleDataToPeerByKey(endpoint, bucketName, objectKey, destPeerHost string) (*PeerResult, error) { | |||||
| ctx, cancel := context.WithCancel(context.Background()) | |||||
| defer cancel() | |||||
| peerResult, err := processScheduleDataToPeer(ctx, urfs.cfg, endpoint, bucketName, objectKey, destPeerHost) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| return peerResult, err | |||||
| } | |||||
| /* | |||||
| func (urfs *urchinfs) CheckScheduleTaskStatus(sourceUrl, destPeerHost string) (*PeerResult, error) { | |||||
| ctx, cancel := context.WithCancel(context.Background()) | |||||
| defer cancel() | |||||
| if err := urfs.cfg.Validate(); err != nil { | |||||
| return nil, err | |||||
| } | |||||
| if err := validateSchedulelArgs(sourceUrl, destPeerHost); err != nil { | |||||
| return nil, err | |||||
| } | |||||
| // Copy object storage to local file. | |||||
| endpoint, bucketName, objectKey, err := parseUrfsURL(sourceUrl) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| peerResult, err := processCheckScheduleTaskStatus(ctx, urfs.cfg, endpoint, bucketName, objectKey, destPeerHost) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| return peerResult, err | |||||
| } | |||||
| */ | |||||
| func (urfs *urchinfs) CheckScheduleTaskStatusByKey(endpoint, bucketName, objectKey, destPeerHost string) (*PeerResult, error) { | |||||
| ctx, cancel := context.WithCancel(context.Background()) | |||||
| defer cancel() | |||||
| peerResult, err := processCheckScheduleTaskStatus(ctx, urfs.cfg, endpoint, bucketName, objectKey, destPeerHost) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| return peerResult, err | |||||
| } | |||||
| // isUrfsURL determines whether the raw url is a urfs url. | |||||
| func isUrfsURL(rawURL string) bool { | |||||
| u, err := url.ParseRequestURI(rawURL) | |||||
| if err != nil { | |||||
| return false | |||||
| } | |||||
| if u.Scheme != UrfsScheme || u.Host == "" || u.Path == "" { | |||||
| return false | |||||
| } | |||||
| return true | |||||
| } | |||||
| // validateSchedulelArgs validates the schedule arguments. | |||||
| func validateSchedulelArgs(sourceUrl, destPeer string) error { | |||||
| if !isUrfsURL(sourceUrl) { | |||||
| return errors.New("source url should be urfs:// protocol") | |||||
| } | |||||
| return nil | |||||
| } | |||||
| /* | |||||
| // Parse object storage url, e.g. urfs://&lt;source endpoint&gt;/&lt;source bucket&gt;/&lt;source filepath&gt; | |||||
| func parseUrfsURL(rawURL string) (string, string, string, error) { | |||||
| u, err := url.ParseRequestURI(rawURL) | |||||
| if err != nil { | |||||
| return "", "", "", err | |||||
| } | |||||
| if u.Scheme != UrfsScheme { | |||||
| return "", "", "", fmt.Errorf("invalid scheme, e.g. %s://endpoint/bucket_name/object_key", UrfsScheme) | |||||
| } | |||||
| if u.Host == "" { | |||||
| return "", "", "", errors.New("empty endpoint name") | |||||
| } | |||||
| if u.Path == "" { | |||||
| return "", "", "", errors.New("empty object path") | |||||
| } | |||||
| bucket, key, found := strings.Cut(strings.Trim(u.Path, "/"), "/") | |||||
| if !found { | |||||
| return "", "", "", errors.New("invalid bucket and object key " + u.Path) | |||||
| } | |||||
| return u.Host, bucket, key, nil | |||||
| } | |||||
| */ | |||||
| // Schedule object storage to peer. | |||||
| func processScheduleDataToPeer(ctx context.Context, cfg *config.DfstoreConfig, endpoint, bucketName, objectKey, dstPeer string) (*PeerResult, error) { | |||||
| dfs := urfs.New(cfg.Endpoint) | |||||
| meta, err := dfs.GetUrfsMetadataWithContext(ctx, &urfs.GetUrfsMetadataInput{ | |||||
| Endpoint: endpoint, | |||||
| BucketName: bucketName, | |||||
| ObjectKey: objectKey, | |||||
| DstPeer: dstPeer, | |||||
| }) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| reader, err := dfs.GetUrfsWithContext(ctx, &urfs.GetUrfsInput{ | |||||
| Endpoint: endpoint, | |||||
| BucketName: bucketName, | |||||
| ObjectKey: objectKey, | |||||
| DstPeer: dstPeer, | |||||
| }) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| defer reader.Close() | |||||
| body, err := ioutil.ReadAll(reader) | |||||
| var peerResult PeerResult | |||||
| if err == nil { | |||||
| err = json.Unmarshal((body), &peerResult) | |||||
| } | |||||
| peerResult.SignedUrl = strings.ReplaceAll(peerResult.SignedUrl, "\\u0026", "&") | |||||
| fileContentLength, err := strconv.ParseInt(peerResult.ContentLength, 10, 64) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| if fileContentLength != meta.ContentLength { | |||||
| return nil, errors.New("content length inconsistent with meta") | |||||
| } | |||||
| return &peerResult, err | |||||
| } | |||||
| // check schedule task status. | |||||
| func processCheckScheduleTaskStatus(ctx context.Context, cfg *config.DfstoreConfig, endpoint, bucketName, objectKey, dstPeer string) (*PeerResult, error) { | |||||
| dfs := urfs.New(cfg.Endpoint) | |||||
| meta, err := dfs.GetUrfsMetadataWithContext(ctx, &urfs.GetUrfsMetadataInput{ | |||||
| Endpoint: endpoint, | |||||
| BucketName: bucketName, | |||||
| ObjectKey: objectKey, | |||||
| DstPeer: dstPeer, | |||||
| }) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| reader, err := dfs.GetUrfsStatusWithContext(ctx, &urfs.GetUrfsInput{ | |||||
| Endpoint: endpoint, | |||||
| BucketName: bucketName, | |||||
| ObjectKey: objectKey, | |||||
| DstPeer: dstPeer, | |||||
| }) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| defer reader.Close() | |||||
| body, err := ioutil.ReadAll(reader) | |||||
| var peerResult PeerResult | |||||
| if err == nil { | |||||
| err = json.Unmarshal((body), &peerResult) | |||||
| } | |||||
| peerResult.SignedUrl = strings.ReplaceAll(peerResult.SignedUrl, "\\u0026", "&") | |||||
| fileContentLength, err := strconv.ParseInt(peerResult.ContentLength, 10, 64) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| if fileContentLength != meta.ContentLength { | |||||
| return nil, errors.New("content length inconsistent with meta") | |||||
| } | |||||
| return &peerResult, err | |||||
| } | |||||
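| // PeerResult is the response returned by the peer object storage service for schedule and status requests. | |||||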
| type PeerResult struct { | |||||
| ContentType string `json:"Content-Type"` | |||||
| ContentLength string `json:"Content-Length"` | |||||
| SignedUrl string | |||||
| DataRoot string | |||||
| DataPath string | |||||
| DataEndpoint string | |||||
| StatusCode int | |||||
| StatusMsg string | |||||
| TaskID string | |||||
| } | |||||
| @@ -265,8 +265,8 @@ page_dev_yunlao_desc3=China computing power network (C²NET) phase I can realize | |||||
| page_dev_yunlao_desc4=Developers can freely select the corresponding computing resources according to the use needs, and can test the adaptability, performance, stability, etc. of the model in different hardware environments. | page_dev_yunlao_desc4=Developers can freely select the corresponding computing resources according to the use needs, and can test the adaptability, performance, stability, etc. of the model in different hardware environments. | ||||
| page_dev_yunlao_desc5=If your model requires more computing resources, you can also apply for it separately. | page_dev_yunlao_desc5=If your model requires more computing resources, you can also apply for it separately. | ||||
| page_dev_yunlao_apply=Apply Separately | page_dev_yunlao_apply=Apply Separately | ||||
| c2net_title=China Computing Network | |||||
| c2net_desc=Extensive access to intelligent computing centers and supercomputing centers across the country to provide users with free computing resources. | |||||
| c2net_title=China Computing NET(C²NET) | |||||
| c2net_desc=Extensive access to intelligent computing centers, supercomputing centers and big data centers across the country to provide users with free computing resources. | |||||
| c2net_center=Center | c2net_center=Center | ||||
| search=Search | search=Search | ||||
| search_repo=Repository | search_repo=Repository | ||||
| @@ -289,6 +289,7 @@ provide_resoure = Computing resources of CPU/GPU/NPU are provided freely for var | |||||
| activity = Activity | activity = Activity | ||||
| no_events = There are no events related | no_events = There are no events related | ||||
| or_t = or | or_t = or | ||||
| powerdby=Powered_by Pengcheng CloudBrain、China Computing NET(C²NET)、 | |||||
| [explore] | [explore] | ||||
| repos = Repositories | repos = Repositories | ||||
| @@ -1217,7 +1218,8 @@ cloudbrain.benchmark.evaluate_train=Train Script | |||||
| cloudbrain.benchmark.evaluate_test=Test Script | cloudbrain.benchmark.evaluate_test=Test Script | ||||
| cloudbrain.benchmark.types={"type":[{"id":1,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=detection","first":"Target detection","second":[{"id":1,"value":"None","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=reid","first":"Target re-identification","second":[{"id":1,"value":"Vehicle re-identification","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"Image-based person re-identification","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=tracking","first":"Multi-target tracking","second":[{"id":1,"value":"None","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} | cloudbrain.benchmark.types={"type":[{"id":1,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=detection","first":"Target detection","second":[{"id":1,"value":"None","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=reid","first":"Target re-identification","second":[{"id":1,"value":"Vehicle re-identification","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"Image-based person re-identification","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=tracking","first":"Multi-target tracking","second":[{"id":1,"value":"None","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} | ||||
| cloudbrain.morethanonejob=You already have a running or waiting task, create it after that task is over. | cloudbrain.morethanonejob=You already have a running or waiting task, create it after that task is over. | ||||
| cloudbrain.morethanonejob1=You already have a <span style="color:rgba(242, 113, 28, 1);">task of the same type</span> waiting or running; please wait for that task to finish before creating a new one. | |||||
| cloudbrain.morethanonejob2=You can view all your Cloudbrain tasks in <a href="/cloudbrains" target="_blank">Home > Cloudbrain Tasks</a>. | |||||
| modelarts.infer_job_model = Model | modelarts.infer_job_model = Model | ||||
| modelarts.infer_job_model_file = Model File | modelarts.infer_job_model_file = Model File | ||||
| @@ -1227,6 +1229,9 @@ modelarts.infer_job.select_model = Select Model | |||||
| modelarts.infer_job.boot_file_helper=The startup file is the entry file for your program execution and must end in .py, such as inference.py, main.py, example/inference.py, case/main.py. | modelarts.infer_job.boot_file_helper=The startup file is the entry file for your program execution and must end in .py, such as inference.py, main.py, example/inference.py, case/main.py. | ||||
| modelarts.infer_job.tooltip = The model has been deleted and cannot be viewed. | modelarts.infer_job.tooltip = The model has been deleted and cannot be viewed. | ||||
| modelarts.download_log=Download log file | modelarts.download_log=Download log file | ||||
| modelarts.log_file = Log file | |||||
| modelarts.fullscreen_log_file = View in full screen | |||||
| modelarts.exit_full_screen = Exit fullscreen | |||||
| modelarts.no_node_right = The value of 'Amount of Compute Node' is wrong, you have no right to use the current value of 'Amount of Compute Node'. | modelarts.no_node_right = The value of 'Amount of Compute Node' is wrong, you have no right to use the current value of 'Amount of Compute Node'. | ||||
| @@ -3218,7 +3223,7 @@ view_sample = View sample | |||||
| inference_output_path_rule = The inference output path is stored in the run parameter result_url. | inference_output_path_rule = The inference output path is stored in the run parameter result_url. | ||||
| model_file_path_rule=The model file location is stored in the run parameter ckpt_url | model_file_path_rule=The model file location is stored in the run parameter ckpt_url | ||||
| model_file_postfix_rule = The supported format of the model file is [ckpt, pb, h5, json, pkl, pth, t7, pdparams, onnx, pbtxt, keras, mlmodel, cfg, pt] | model_file_postfix_rule = The supported format of the model file is [ckpt, pb, h5, json, pkl, pth, t7, pdparams, onnx, pbtxt, keras, mlmodel, cfg, pt] | ||||
| model_convert_postfix_rule = The supported format of the model file is [.pth, .pkl, .onnx, .mindir, .ckpt, .pb] | |||||
| model_convert_postfix_rule = The supported format of the model file is [.pth, .pkl, .onnx, .mindir, .ckpt, .pb, .pdmodel, .pdiparams, .params, .json] | |||||
| delete_task = Delete task | delete_task = Delete task | ||||
| task_delete_confirm = Are you sure you want to delete this task? Once this task is deleted, it cannot be recovered. | task_delete_confirm = Are you sure you want to delete this task? Once this task is deleted, it cannot be recovered. | ||||
| operate_confirm = confirm | operate_confirm = confirm | ||||
| @@ -267,8 +267,8 @@ page_dev_yunlao_desc3=中国算力网(C²NET)一期可实现不同人工智 | |||||
| page_dev_yunlao_desc4=开发者可以根据使用需求,自由选择相应计算资源,可以测试模型在不同硬件环境下的适配能力、性能、稳定性等。 | page_dev_yunlao_desc4=开发者可以根据使用需求,自由选择相应计算资源,可以测试模型在不同硬件环境下的适配能力、性能、稳定性等。 | ||||
| page_dev_yunlao_desc5=如果您的模型需要更多的计算资源,也可以单独申请。 | page_dev_yunlao_desc5=如果您的模型需要更多的计算资源,也可以单独申请。 | ||||
| page_dev_yunlao_apply=单独申请 | page_dev_yunlao_apply=单独申请 | ||||
| c2net_title=中国算力网 | |||||
| c2net_desc=广泛接入全国各地智算中心、超算中心,为用户提供免费算力资源 | |||||
| c2net_title=中国算力网(C²NET) | |||||
| c2net_desc=广泛接入全国各地智算中心、超算中心与大数据中心等,为用户提供免费算力资源 | |||||
| c2net_center=中心 | c2net_center=中心 | ||||
| search=搜索 | search=搜索 | ||||
| search_repo=项目 | search_repo=项目 | ||||
| @@ -292,6 +292,7 @@ create_pro = 创建项目 | |||||
| activity = 活动 | activity = 活动 | ||||
| no_events = 还没有与您相关的活动 | no_events = 还没有与您相关的活动 | ||||
| or_t = 或 | or_t = 或 | ||||
| powerdby=Powered by 鹏城实验室云脑、中国算力网(C²NET)、 | |||||
| [explore] | [explore] | ||||
| @@ -1231,6 +1232,8 @@ cloudbrain.benchmark.evaluate_test=测试程序 | |||||
| cloudbrain.benchmark.types={"type":[{"id":1,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=detection","first":"目标检测","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=reid","first":"目标重识别","second":[{"id":1,"value":"车辆重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"基于图像的行人重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=tracking","first":"多目标跟踪","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} | cloudbrain.benchmark.types={"type":[{"id":1,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=detection","first":"目标检测","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=reid","first":"目标重识别","second":[{"id":1,"value":"车辆重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"基于图像的行人重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=tracking","first":"多目标跟踪","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} | ||||
| cloudbrain.benchmark.model.types={"type":[{"id":1,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=detection","first":"目标检测","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=reid","first":"目标重识别","second":[{"id":1,"value":"车辆重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"基于图像的行人重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=tracking","first":"多目标跟踪","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} | cloudbrain.benchmark.model.types={"type":[{"id":1,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=detection","first":"目标检测","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=reid","first":"目标重识别","second":[{"id":1,"value":"车辆重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"基于图像的行人重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=tracking","first":"多目标跟踪","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} | ||||
| cloudbrain.morethanonejob=您已经创建了一个正在等待或运行中的同类任务,请等待任务结束再创建。 | cloudbrain.morethanonejob=您已经创建了一个正在等待或运行中的同类任务,请等待任务结束再创建。 | ||||
| cloudbrain.morethanonejob1=您已经有 <span style="color:rgba(242, 113, 28, 1);">同类任务</span> 正在等待或运行中,请等待任务结束再创建; | |||||
| cloudbrain.morethanonejob2=可以在 “<a href="/cloudbrains" target="_blank" >个人中心 > 云脑任务</a>” 查看您所有的云脑任务。 | |||||
| modelarts.infer_job_model = 模型名称 | modelarts.infer_job_model = 模型名称 | ||||
| modelarts.infer_job_model_file = 模型文件 | modelarts.infer_job_model_file = 模型文件 | ||||
| @@ -1240,6 +1243,9 @@ modelarts.infer_job.select_model = 选择模型 | |||||
| modelarts.infer_job.boot_file_helper=启动文件是您程序执行的入口文件,必须是以.py结尾的文件。比如inference.py、main.py、example/inference.py、case/main.py。 | modelarts.infer_job.boot_file_helper=启动文件是您程序执行的入口文件,必须是以.py结尾的文件。比如inference.py、main.py、example/inference.py、case/main.py。 | ||||
| modelarts.infer_job.tooltip = 该模型已删除,无法查看。 | modelarts.infer_job.tooltip = 该模型已删除,无法查看。 | ||||
| modelarts.download_log=下载日志文件 | modelarts.download_log=下载日志文件 | ||||
| modelarts.log_file=日志文件 | |||||
| modelarts.fullscreen_log_file=全屏查看 | |||||
| modelarts.exit_full_screen=退出全屏 | |||||
| modelarts.no_node_right = 计算节点数的值配置错误,您没有权限使用当前配置的计算节点数。 | modelarts.no_node_right = 计算节点数的值配置错误,您没有权限使用当前配置的计算节点数。 | ||||
| @@ -3237,7 +3243,7 @@ view_sample = 查看样例 | |||||
| inference_output_path_rule = 推理输出路径存储在运行参数 result_url 中。 | inference_output_path_rule = 推理输出路径存储在运行参数 result_url 中。 | ||||
| model_file_path_rule = 模型文件位置存储在运行参数 ckpt_url 中。 | model_file_path_rule = 模型文件位置存储在运行参数 ckpt_url 中。 | ||||
| model_file_postfix_rule = 模型文件支持的格式为 [ckpt, pb, h5, json, pkl, pth, t7, pdparams, onnx, pbtxt, keras, mlmodel, cfg, pt] | model_file_postfix_rule = 模型文件支持的格式为 [ckpt, pb, h5, json, pkl, pth, t7, pdparams, onnx, pbtxt, keras, mlmodel, cfg, pt] | ||||
| model_convert_postfix_rule = 模型文件支持的格式为 [.pth, .pkl, .onnx, .mindir, .ckpt, .pb] | |||||
| model_convert_postfix_rule = 模型文件支持的格式为 [.pth, .pkl, .onnx, .mindir, .ckpt, .pb, .pdmodel, .pdiparams, .params, .json] | |||||
| delete_task = 删除任务 | delete_task = 删除任务 | ||||
| task_delete_confirm = 你确认删除该任务么?此任务一旦删除不可恢复。 | task_delete_confirm = 你确认删除该任务么?此任务一旦删除不可恢复。 | ||||
| operate_confirm = 确定操作 | operate_confirm = 确定操作 | ||||
| @@ -98,6 +98,12 @@ func CloudBrains(ctx *context.Context) { | |||||
| ciTasks[i].CanDebug = true | ciTasks[i].CanDebug = true | ||||
| ciTasks[i].CanDel = true | ciTasks[i].CanDel = true | ||||
| ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource | ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource | ||||
| if ciTasks[i].Cloudbrain.Type == models.TypeC2Net { | |||||
| ciTasks[i].Cloudbrain.Spec.Cluster = models.C2NetCluster | |||||
| } else { | |||||
| ciTasks[i].Cloudbrain.Spec.Cluster = models.OpenICluster | |||||
| } | |||||
| } | } | ||||
| pager := context.NewPagination(int(count), setting.UI.IssuePagingNum, page, getTotalPage(count, setting.UI.IssuePagingNum)) | pager := context.NewPagination(int(count), setting.UI.IssuePagingNum, page, getTotalPage(count, setting.UI.IssuePagingNum)) | ||||
| @@ -627,6 +627,11 @@ func RegisterRoutes(m *macaron.Macaron) { | |||||
| m.Get("/hours_data", repo.GetCloudbrainsCreateHoursData) | m.Get("/hours_data", repo.GetCloudbrainsCreateHoursData) | ||||
| m.Get("/waitting_top_data", repo.GetWaittingTop) | m.Get("/waitting_top_data", repo.GetWaittingTop) | ||||
| m.Get("/running_top_data", repo.GetRunningTop) | m.Get("/running_top_data", repo.GetRunningTop) | ||||
| m.Get("/overview_resource", repo.GetCloudbrainResourceOverview) | |||||
| m.Get("/resource_usage_statistic", repo.GetDurationRateStatistic) | |||||
| m.Get("/resource_usage_rate_detail", repo.GetCloudbrainResourceUsageDetail) | |||||
| m.Get("/apitest_for_statistic", repo.CloudbrainDurationStatisticForTest) | |||||
| }) | }) | ||||
| }, operationReq) | }, operationReq) | ||||
| @@ -737,6 +742,7 @@ func RegisterRoutes(m *macaron.Macaron) { | |||||
| m.Get("/issues/search", repo.SearchIssues) | m.Get("/issues/search", repo.SearchIssues) | ||||
| m.Post("/migrate", reqToken(), bind(auth.MigrateRepoForm{}), repo.Migrate) | m.Post("/migrate", reqToken(), bind(auth.MigrateRepoForm{}), repo.Migrate) | ||||
| m.Post("/migrate/submit", reqToken(), bind(auth.MigrateRepoForm{}), repo.MigrateSubmit) | |||||
| m.Group("/specification", func() { | m.Group("/specification", func() { | ||||
| m.Get("", repo.GetResourceSpec) | m.Get("", repo.GetResourceSpec) | ||||
| @@ -248,6 +248,7 @@ func GetCloudBrainInferenceJob(ctx *context.APIContext) { | |||||
| "JobID": jobID, | "JobID": jobID, | ||||
| "JobStatus": job.Status, | "JobStatus": job.Status, | ||||
| "JobDuration": job.TrainJobDuration, | "JobDuration": job.TrainJobDuration, | ||||
| "StartTime": job.StartTime, | |||||
| }) | }) | ||||
| } | } | ||||
| @@ -517,6 +518,7 @@ func ModelSafetyGetLog(ctx *context.APIContext) { | |||||
| "Content": result.Content, | "Content": result.Content, | ||||
| "Lines": result.Lines, | "Lines": result.Lines, | ||||
| "CanLogDownload": isCanDownloadLog(ctx, job), | "CanLogDownload": isCanDownloadLog(ctx, job), | ||||
| "StartTime": job.StartTime, | |||||
| }) | }) | ||||
| } | } | ||||
| } | } | ||||
| @@ -648,7 +650,13 @@ func CloudbrainGetLog(ctx *context.APIContext) { | |||||
| startLine = 0 | startLine = 0 | ||||
| } | } | ||||
| } | } | ||||
| } else { | |||||
| if startLine > 0 { | |||||
| startLine += 1 | |||||
| endLine += 1 | |||||
| } | |||||
| } | } | ||||
| result = getLogFromModelDir(job.JobName, startLine, endLine, resultPath) | result = getLogFromModelDir(job.JobName, startLine, endLine, resultPath) | ||||
| if result == nil { | if result == nil { | ||||
| log.Error("GetJobLog failed: %v", err, ctx.Data["MsgID"]) | log.Error("GetJobLog failed: %v", err, ctx.Data["MsgID"]) | ||||
| @@ -671,6 +679,7 @@ func CloudbrainGetLog(ctx *context.APIContext) { | |||||
| "Content": content, | "Content": content, | ||||
| "Lines": result["Lines"], | "Lines": result["Lines"], | ||||
| "CanLogDownload": result["FileName"] != "", | "CanLogDownload": result["FileName"] != "", | ||||
| "StartTime": job.StartTime, | |||||
| } | } | ||||
| //result := CloudbrainGetLogByJobId(job.JobID, job.JobName) | //result := CloudbrainGetLogByJobId(job.JobID, job.JobName) | ||||
| ctx.JSON(http.StatusOK, re) | ctx.JSON(http.StatusOK, re) | ||||
| @@ -795,10 +804,10 @@ func getLogFromModelDir(jobName string, startLine int, endLine int, resultPath s | |||||
| line, error := r.ReadString('\n') | line, error := r.ReadString('\n') | ||||
| if error == io.EOF { | if error == io.EOF { | ||||
| if i >= startLine { | if i >= startLine { | ||||
| fileEndLine = i | |||||
| re = re + line | re = re + line | ||||
| count++ | count++ | ||||
| } | } | ||||
| fileEndLine = i + 1 | |||||
| log.Info("read file completed.") | log.Info("read file completed.") | ||||
| break | break | ||||
| } | } | ||||
| @@ -808,13 +817,12 @@ func getLogFromModelDir(jobName string, startLine int, endLine int, resultPath s | |||||
| } | } | ||||
| if error == nil { | if error == nil { | ||||
| if i >= startLine { | if i >= startLine { | ||||
| fileEndLine = i | |||||
| fileEndLine = i + 1 | |||||
| re = re + line | re = re + line | ||||
| count++ | count++ | ||||
| } | } | ||||
| } | } | ||||
| } | } | ||||
| fileEndLine = fileEndLine + 1 | |||||
| } else { | } else { | ||||
| log.Info("error:" + err.Error()) | log.Info("error:" + err.Error()) | ||||
| } | } | ||||
| @@ -4,6 +4,7 @@ import ( | |||||
| "fmt" | "fmt" | ||||
| "net/http" | "net/http" | ||||
| "net/url" | "net/url" | ||||
| "strconv" | |||||
| "strings" | "strings" | ||||
| "time" | "time" | ||||
| @@ -120,9 +121,6 @@ func GetOverviewDuration(ctx *context.Context) { | |||||
| recordBeginTime := recordCloudbrain[0].Cloudbrain.CreatedUnix | recordBeginTime := recordCloudbrain[0].Cloudbrain.CreatedUnix | ||||
| now := time.Now() | now := time.Now() | ||||
| endTime := now | endTime := now | ||||
| page := 1 | |||||
| pagesize := 1000 | |||||
| count := pagesize | |||||
| worker_server_num := 1 | worker_server_num := 1 | ||||
| cardNum := 1 | cardNum := 1 | ||||
| durationAllSum := int64(0) | durationAllSum := int64(0) | ||||
| @@ -138,54 +136,46 @@ func GetOverviewDuration(ctx *context.Context) { | |||||
| c2NetDuration := int64(0) | c2NetDuration := int64(0) | ||||
| cDCenterDuration := int64(0) | cDCenterDuration := int64(0) | ||||
| for count == pagesize && count != 0 { | |||||
| cloudbrains, _, err := models.CloudbrainAllStatic(&models.CloudbrainsOptions{ | |||||
| ListOptions: models.ListOptions{ | |||||
| Page: page, | |||||
| PageSize: pagesize, | |||||
| }, | |||||
| Type: models.TypeCloudBrainAll, | |||||
| BeginTimeUnix: int64(recordBeginTime), | |||||
| EndTimeUnix: endTime.Unix(), | |||||
| }) | |||||
| if err != nil { | |||||
| ctx.ServerError("Get cloudbrains failed:", err) | |||||
| return | |||||
| } | |||||
| models.LoadSpecs4CloudbrainInfo(cloudbrains) | |||||
| cloudbrains, _, err := models.CloudbrainAllStatic(&models.CloudbrainsOptions{ | |||||
| Type: models.TypeCloudBrainAll, | |||||
| BeginTimeUnix: int64(recordBeginTime), | |||||
| EndTimeUnix: endTime.Unix(), | |||||
| }) | |||||
| if err != nil { | |||||
| ctx.ServerError("Get cloudbrains failed:", err) | |||||
| return | |||||
| } | |||||
| models.LoadSpecs4CloudbrainInfo(cloudbrains) | |||||
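| // Accumulate per-cluster duration and card-hours (duration * nodes * accelerator cards) for every cloudbrain task in the period. | |||||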
| for _, cloudbrain := range cloudbrains { | |||||
| if cloudbrain.Cloudbrain.WorkServerNumber >= 1 { | |||||
| worker_server_num = cloudbrain.Cloudbrain.WorkServerNumber | |||||
| } else { | |||||
| worker_server_num = 1 | |||||
| } | |||||
| if cloudbrain.Cloudbrain.Spec == nil { | |||||
| cardNum = 1 | |||||
| } else { | |||||
| cardNum = cloudbrain.Cloudbrain.Spec.AccCardsNum | |||||
| } | |||||
| duration := cloudbrain.Duration | |||||
| durationSum := cloudbrain.Duration * int64(worker_server_num) * int64(cardNum) | |||||
| if cloudbrain.Cloudbrain.Type == models.TypeCloudBrainOne { | |||||
| cloudBrainOneDuration += duration | |||||
| cloudBrainOneCardDuSum += durationSum | |||||
| } else if cloudbrain.Cloudbrain.Type == models.TypeCloudBrainTwo { | |||||
| cloudBrainTwoDuration += duration | |||||
| cloudBrainTwoCardDuSum += durationSum | |||||
| } else if cloudbrain.Cloudbrain.Type == models.TypeC2Net { | |||||
| c2NetDuration += duration | |||||
| c2NetCardDuSum += durationSum | |||||
| } else if cloudbrain.Cloudbrain.Type == models.TypeCDCenter { | |||||
| cDCenterDuration += duration | |||||
| cDNetCardDuSum += durationSum | |||||
| } | |||||
| durationAllSum += duration | |||||
| cardDuSum += durationSum | |||||
| count = len(cloudbrains) | |||||
| page += 1 | |||||
| for _, cloudbrain := range cloudbrains { | |||||
| if cloudbrain.Cloudbrain.WorkServerNumber >= 1 { | |||||
| worker_server_num = cloudbrain.Cloudbrain.WorkServerNumber | |||||
| } else { | |||||
| worker_server_num = 1 | |||||
| } | |||||
| if cloudbrain.Cloudbrain.Spec == nil { | |||||
| cardNum = 1 | |||||
| } else { | |||||
| cardNum = cloudbrain.Cloudbrain.Spec.AccCardsNum | |||||
| } | |||||
| duration := cloudbrain.Duration | |||||
| durationSum := cloudbrain.Duration * int64(worker_server_num) * int64(cardNum) | |||||
| if cloudbrain.Cloudbrain.Type == models.TypeCloudBrainOne { | |||||
| cloudBrainOneDuration += duration | |||||
| cloudBrainOneCardDuSum += durationSum | |||||
| } else if cloudbrain.Cloudbrain.Type == models.TypeCloudBrainTwo { | |||||
| cloudBrainTwoDuration += duration | |||||
| cloudBrainTwoCardDuSum += durationSum | |||||
| } else if cloudbrain.Cloudbrain.Type == models.TypeC2Net { | |||||
| c2NetDuration += duration | |||||
| c2NetCardDuSum += durationSum | |||||
| } else if cloudbrain.Cloudbrain.Type == models.TypeCDCenter { | |||||
| cDCenterDuration += duration | |||||
| cDNetCardDuSum += durationSum | |||||
| } | } | ||||
| durationAllSum += duration | |||||
| cardDuSum += durationSum | |||||
| } | } | ||||
| ctx.JSON(http.StatusOK, map[string]interface{}{ | ctx.JSON(http.StatusOK, map[string]interface{}{ | ||||
| "cloudBrainOneCardDuSum": cloudBrainOneCardDuSum, | "cloudBrainOneCardDuSum": cloudBrainOneCardDuSum, | ||||
| @@ -532,6 +522,21 @@ func getPageDateCloudbrainInfo(dateCloudbrainInfo []DateCloudbrainInfo, page int | |||||
| } | } | ||||
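| // getPageDateCloudbrainDuration returns the requested page of the per-day usage statistics, or nil when the page starts past the end of the slice. | |||||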
| func getPageDateCloudbrainDuration(dateUsageStatistic []models.DateUsageStatistic, page int, pagesize int) []models.DateUsageStatistic { | |||||
| begin := (page - 1) * pagesize | |||||
| end := (page) * pagesize | |||||
| if begin > len(dateUsageStatistic)-1 { | |||||
| return nil | |||||
| } | |||||
| if end > len(dateUsageStatistic)-1 { | |||||
| return dateUsageStatistic[begin:] | |||||
| } else { | |||||
| return dateUsageStatistic[begin:end] | |||||
| } | |||||
| } | |||||
| func GetAllCloudbrainsPeriodDistribution(ctx *context.Context) { | func GetAllCloudbrainsPeriodDistribution(ctx *context.Context) { | ||||
| queryType := ctx.QueryTrim("type") | queryType := ctx.QueryTrim("type") | ||||
| beginTimeStr := ctx.QueryTrim("beginTime") | beginTimeStr := ctx.QueryTrim("beginTime") | ||||
| @@ -545,7 +550,7 @@ func GetAllCloudbrainsPeriodDistribution(ctx *context.Context) { | |||||
| recordBeginTime := time.Unix(int64(recordCloudbrain[0].Cloudbrain.CreatedUnix), 0) | recordBeginTime := time.Unix(int64(recordCloudbrain[0].Cloudbrain.CreatedUnix), 0) | ||||
| beginTime, endTime, err := getCloudbrainTimePeroid(ctx, recordBeginTime) | beginTime, endTime, err := getCloudbrainTimePeroid(ctx, recordBeginTime) | ||||
| if err != nil { | if err != nil { | ||||
| log.Error("Parameter is wrong", err) | |||||
| log.Error("getCloudbrainTimePeroid error:", err) | |||||
| ctx.Error(http.StatusBadRequest, ctx.Tr("repo.parameter_is_wrong")) | ctx.Error(http.StatusBadRequest, ctx.Tr("repo.parameter_is_wrong")) | ||||
| return | return | ||||
| } | } | ||||
| @@ -727,7 +732,7 @@ func GetCloudbrainsDetailData(ctx *context.Context) { | |||||
| keyword := strings.Trim(ctx.Query("q"), " ") | keyword := strings.Trim(ctx.Query("q"), " ") | ||||
| ciTasks, count, err := models.CloudbrainAll(&models.CloudbrainsOptions{ | |||||
| ciTasks, _, err := models.CloudbrainAll(&models.CloudbrainsOptions{ | |||||
| ListOptions: models.ListOptions{ | ListOptions: models.ListOptions{ | ||||
| Page: page, | Page: page, | ||||
| PageSize: pageSize, | PageSize: pageSize, | ||||
| @@ -742,8 +747,8 @@ func GetCloudbrainsDetailData(ctx *context.Context) { | |||||
| NeedRepoInfo: true, | NeedRepoInfo: true, | ||||
| BeginTimeUnix: int64(recordBeginTime), | BeginTimeUnix: int64(recordBeginTime), | ||||
| EndTimeUnix: endTime.Unix(), | EndTimeUnix: endTime.Unix(), | ||||
| AiCenter: aiCenter, | |||||
| NeedDeleteInfo: needDeleteInfo, | |||||
| // AiCenter: aiCenter, | |||||
| NeedDeleteInfo: needDeleteInfo, | |||||
| }) | }) | ||||
| if err != nil { | if err != nil { | ||||
| ctx.ServerError("Get job failed:", err) | ctx.ServerError("Get job failed:", err) | ||||
| @@ -753,43 +758,45 @@ func GetCloudbrainsDetailData(ctx *context.Context) { | |||||
| nilTime := time.Time{} | nilTime := time.Time{} | ||||
| tasks := []models.TaskDetail{} | tasks := []models.TaskDetail{} | ||||
| for i, task := range ciTasks { | for i, task := range ciTasks { | ||||
| ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource | |||||
| var taskDetail models.TaskDetail | |||||
| taskDetail.ID = ciTasks[i].Cloudbrain.ID | |||||
| taskDetail.JobID = ciTasks[i].Cloudbrain.JobID | |||||
| taskDetail.JobName = ciTasks[i].JobName | |||||
| taskDetail.DisplayJobName = ciTasks[i].DisplayJobName | |||||
| taskDetail.Status = ciTasks[i].Status | |||||
| taskDetail.JobType = ciTasks[i].JobType | |||||
| taskDetail.CreatedUnix = ciTasks[i].Cloudbrain.CreatedUnix | |||||
| taskDetail.RunTime = ciTasks[i].Cloudbrain.TrainJobDuration | |||||
| taskDetail.StartTime = ciTasks[i].StartTime | |||||
| taskDetail.EndTime = ciTasks[i].EndTime | |||||
| taskDetail.ComputeResource = ciTasks[i].ComputeResource | |||||
| taskDetail.Type = ciTasks[i].Cloudbrain.Type | |||||
| taskDetail.UserName = ciTasks[i].User.Name | |||||
| taskDetail.RepoID = ciTasks[i].RepoID | |||||
| if ciTasks[i].Repo != nil { | |||||
| taskDetail.RepoName = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Name | |||||
| taskDetail.RepoAlias = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Alias | |||||
| } | |||||
| if ciTasks[i].Cloudbrain.WorkServerNumber >= 1 { | |||||
| taskDetail.WorkServerNum = int64(ciTasks[i].Cloudbrain.WorkServerNumber) | |||||
| } else { | |||||
| taskDetail.WorkServerNum = 1 | |||||
| } | |||||
| taskDetail.CardDuration = repo.GetCloudbrainCardDuration(ciTasks[i].Cloudbrain) | |||||
| taskDetail.WaitTime = repo.GetCloudbrainWaitTime(ciTasks[i].Cloudbrain) | |||||
| if aiCenter == "" || aiCenter == task.Cloudbrain.Spec.AiCenterCode { | |||||
| ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource | |||||
| var taskDetail models.TaskDetail | |||||
| taskDetail.ID = ciTasks[i].Cloudbrain.ID | |||||
| taskDetail.JobID = ciTasks[i].Cloudbrain.JobID | |||||
| taskDetail.JobName = ciTasks[i].JobName | |||||
| taskDetail.DisplayJobName = ciTasks[i].DisplayJobName | |||||
| taskDetail.Status = ciTasks[i].Status | |||||
| taskDetail.JobType = ciTasks[i].JobType | |||||
| taskDetail.CreatedUnix = ciTasks[i].Cloudbrain.CreatedUnix | |||||
| taskDetail.RunTime = ciTasks[i].Cloudbrain.TrainJobDuration | |||||
| taskDetail.StartTime = ciTasks[i].StartTime | |||||
| taskDetail.EndTime = ciTasks[i].EndTime | |||||
| taskDetail.ComputeResource = ciTasks[i].ComputeResource | |||||
| taskDetail.Type = ciTasks[i].Cloudbrain.Type | |||||
| taskDetail.UserName = ciTasks[i].User.Name | |||||
| taskDetail.RepoID = ciTasks[i].RepoID | |||||
| if ciTasks[i].Repo != nil { | |||||
| taskDetail.RepoName = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Name | |||||
| taskDetail.RepoAlias = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Alias | |||||
| } | |||||
| if ciTasks[i].Cloudbrain.WorkServerNumber >= 1 { | |||||
| taskDetail.WorkServerNum = int64(ciTasks[i].Cloudbrain.WorkServerNumber) | |||||
| } else { | |||||
| taskDetail.WorkServerNum = 1 | |||||
| } | |||||
| taskDetail.CardDuration = repo.GetCloudbrainCardDuration(ciTasks[i].Cloudbrain) | |||||
| taskDetail.WaitTime = repo.GetCloudbrainWaitTime(ciTasks[i].Cloudbrain) | |||||
| if ciTasks[i].Cloudbrain.DeletedAt != nilTime || ciTasks[i].Repo == nil { | |||||
| taskDetail.IsDelete = true | |||||
| } else { | |||||
| taskDetail.IsDelete = false | |||||
| if ciTasks[i].Cloudbrain.DeletedAt != nilTime || ciTasks[i].Repo == nil { | |||||
| taskDetail.IsDelete = true | |||||
| } else { | |||||
| taskDetail.IsDelete = false | |||||
| } | |||||
| taskDetail.Spec = ciTasks[i].Spec | |||||
| tasks = append(tasks, taskDetail) | |||||
| } | } | ||||
| taskDetail.Spec = ciTasks[i].Spec | |||||
| tasks = append(tasks, taskDetail) | |||||
| } | } | ||||
| count := int64(len(tasks)) | |||||
| pager := context.NewPagination(int(count), pageSize, page, getTotalPage(count, pageSize)) | pager := context.NewPagination(int(count), pageSize, page, getTotalPage(count, pageSize)) | ||||
| pager.SetDefaultParams(ctx) | pager.SetDefaultParams(ctx) | ||||
| pager.AddParam(ctx, "listType", "ListType") | pager.AddParam(ctx, "listType", "ListType") | ||||
| @@ -1403,3 +1410,424 @@ func getCloudbrainTimePeroid(ctx *context.Context, recordBeginTime time.Time) (t | |||||
| return beginTime, endTime, nil | return beginTime, endTime, nil | ||||
| } | } | ||||
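| // GetCloudbrainResourceOverview returns the number of usable accelerator cards per AI center and card type, grouped into the OpenI and C2Net clusters, together with the statistic record begin and update times. | |||||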
| func GetCloudbrainResourceOverview(ctx *context.Context) { | |||||
| recordCloudbrainDuration, err := models.GetDurationRecordBeginTime() | |||||
| if err != nil { | |||||
| log.Error("Can not get GetDurationRecordBeginTime", err) | |||||
| return | |||||
| } | |||||
| recordBeginTime := recordCloudbrainDuration[0].CreatedUnix | |||||
| recordUpdateTime := time.Now().Unix() | |||||
| resourceQueues, err := models.GetCanUseCardInfo() | |||||
| if err != nil { | |||||
| log.Info("GetCanUseCardInfo err: %v", err) | |||||
| return | |||||
| } | |||||
| OpenIResourceDetail := []models.ResourceDetail{} | |||||
| C2NetResourceDetail := []models.ResourceDetail{} | |||||
| for _, resourceQueue := range resourceQueues { | |||||
| if resourceQueue.Cluster == models.OpenICluster { | |||||
| var resourceDetail models.ResourceDetail | |||||
| resourceDetail.QueueCode = resourceQueue.QueueCode | |||||
| resourceDetail.Cluster = resourceQueue.Cluster | |||||
| resourceDetail.AiCenterCode = resourceQueue.AiCenterCode | |||||
| resourceDetail.AiCenterName = resourceQueue.AiCenterName + "/" + resourceQueue.AiCenterCode | |||||
| resourceDetail.ComputeResource = resourceQueue.ComputeResource | |||||
| resourceDetail.AccCardType = resourceQueue.AccCardType + "(" + resourceQueue.ComputeResource + ")" | |||||
| resourceDetail.CardsTotalNum = resourceQueue.CardsTotalNum | |||||
| resourceDetail.IsAutomaticSync = resourceQueue.IsAutomaticSync | |||||
| OpenIResourceDetail = append(OpenIResourceDetail, resourceDetail) | |||||
| } | |||||
| if resourceQueue.Cluster == models.C2NetCluster { | |||||
| var resourceDetail models.ResourceDetail | |||||
| resourceDetail.QueueCode = resourceQueue.QueueCode | |||||
| resourceDetail.Cluster = resourceQueue.Cluster | |||||
| resourceDetail.AiCenterCode = resourceQueue.AiCenterCode | |||||
| resourceDetail.AiCenterName = resourceQueue.AiCenterName + "/" + resourceQueue.AiCenterCode | |||||
| resourceDetail.ComputeResource = resourceQueue.ComputeResource | |||||
| resourceDetail.AccCardType = resourceQueue.AccCardType + "(" + resourceQueue.ComputeResource + ")" | |||||
| resourceDetail.CardsTotalNum = resourceQueue.CardsTotalNum | |||||
| resourceDetail.IsAutomaticSync = resourceQueue.IsAutomaticSync | |||||
| C2NetResourceDetail = append(C2NetResourceDetail, resourceDetail) | |||||
| } | |||||
| } | |||||
| openIResourceNum := make(map[string]map[string]int) | |||||
| for _, openIResourceDetail := range OpenIResourceDetail { | |||||
| if _, ok := openIResourceNum[openIResourceDetail.AiCenterName]; !ok { | |||||
| openIResourceNum[openIResourceDetail.AiCenterName] = make(map[string]int) | |||||
| } | |||||
| if _, ok := openIResourceNum[openIResourceDetail.AiCenterName][openIResourceDetail.AccCardType]; !ok { | |||||
| openIResourceNum[openIResourceDetail.AiCenterName][openIResourceDetail.AccCardType] = openIResourceDetail.CardsTotalNum | |||||
| } else { | |||||
| openIResourceNum[openIResourceDetail.AiCenterName][openIResourceDetail.AccCardType] += openIResourceDetail.CardsTotalNum | |||||
| } | |||||
| } | |||||
| c2NetResourceNum := make(map[string]map[string]int) | |||||
| for _, c2NetResourceDetail := range C2NetResourceDetail { | |||||
| if _, ok := c2NetResourceNum[c2NetResourceDetail.AiCenterName]; !ok { | |||||
| c2NetResourceNum[c2NetResourceDetail.AiCenterName] = make(map[string]int) | |||||
| } | |||||
| if _, ok := c2NetResourceNum[c2NetResourceDetail.AiCenterName][c2NetResourceDetail.AccCardType]; !ok { | |||||
| c2NetResourceNum[c2NetResourceDetail.AiCenterName][c2NetResourceDetail.AccCardType] = c2NetResourceDetail.CardsTotalNum | |||||
| } else { | |||||
| c2NetResourceNum[c2NetResourceDetail.AiCenterName][c2NetResourceDetail.AccCardType] += c2NetResourceDetail.CardsTotalNum | |||||
| } | |||||
| } | |||||
| ctx.JSON(http.StatusOK, map[string]interface{}{ | |||||
| "openI": openIResourceNum, | |||||
| "c2Net": c2NetResourceNum, | |||||
| "recordUpdateTime": recordUpdateTime, | |||||
| "recordBeginTime": recordBeginTime, | |||||
| }) | |||||
| } | |||||
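| // GetCloudbrainResourceUsageDetail returns the per-day (paginated) and per-hour card usage statistics of one AI center within the selected time range. | |||||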
| func GetCloudbrainResourceUsageDetail(ctx *context.Context) { | |||||
| aiCenterCode := ctx.QueryTrim("aiCenterCode") | |||||
| if aiCenterCode == "" { | |||||
| aiCenterCode = models.AICenterOfCloudBrainOne | |||||
| } | |||||
| beginTime, endTime := getBeginAndEndTime(ctx) | |||||
| dayCloudbrainDuration, count, err := getDayCloudbrainDuration(beginTime, endTime, aiCenterCode) | |||||
| if err != nil { | |||||
| log.Error("Can not query dayCloudbrainDuration.", err) | |||||
| return | |||||
| } | |||||
| hourCloudbrainDuration, err := getHourCloudbrainDuration(beginTime, endTime, aiCenterCode) | |||||
| if err != nil { | |||||
| log.Error("Can not query hourCloudbrainDuration.", err) | |||||
| return | |||||
| } | |||||
| page := ctx.QueryInt("page") | |||||
| if page <= 0 { | |||||
| page = 1 | |||||
| } | |||||
| pagesize := ctx.QueryInt("pagesize") | |||||
| if pagesize <= 0 { | |||||
| pagesize = 36500 | |||||
| } | |||||
| pageDateCloudbrainDuration := getPageDateCloudbrainDuration(dayCloudbrainDuration, page, pagesize) | |||||
| ctx.JSON(http.StatusOK, map[string]interface{}{ | |||||
| "totalCount": count, | |||||
| "pageDateCloudbrainDuration": pageDateCloudbrainDuration, | |||||
| "hourCloudbrainDuration": hourCloudbrainDuration, | |||||
| }) | |||||
| } | |||||
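| // GetDurationRateStatistic returns card-hour totals, usage and usage rates per AI center for the OpenI and C2Net clusters over the selected time range. | |||||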
| func GetDurationRateStatistic(ctx *context.Context) { | |||||
| beginTime, endTime := getBeginAndEndTime(ctx) | |||||
| OpenIDurationRate, C2NetDurationRate, totalUsageRate := getDurationStatistic(beginTime, endTime) | |||||
| ctx.JSON(http.StatusOK, map[string]interface{}{ | |||||
| "openIDurationRate": OpenIDurationRate, | |||||
| "c2NetDurationRate": C2NetDurationRate, | |||||
| "totalUsageRate": totalUsageRate, | |||||
| }) | |||||
| } | |||||
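| // CloudbrainDurationStatisticForTest triggers the hourly duration statistic job on demand; it exists only for API testing. | |||||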
| func CloudbrainDurationStatisticForTest(ctx *context.Context) { | |||||
| repo.CloudbrainDurationStatisticHour() | |||||
| ctx.JSON(http.StatusOK, map[string]interface{}{ | |||||
| "message": 0, | |||||
| }) | |||||
| } | |||||
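| // getBeginAndEndTime resolves the statistic window from the "type" shortcut (all, today, yesterday, last_7day, last_30day, current_month, current_year, last_month) or from explicit beginTime/endTime parameters. | |||||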
| func getBeginAndEndTime(ctx *context.Context) (time.Time, time.Time) { | |||||
| queryType := ctx.QueryTrim("type") | |||||
| now := time.Now() | |||||
| beginTimeStr := ctx.QueryTrim("beginTime") | |||||
| endTimeStr := ctx.QueryTrim("endTime") | |||||
| var beginTime time.Time | |||||
| var endTime time.Time | |||||
| var err error | |||||
| if queryType != "" { | |||||
| if queryType == "all" { | |||||
| recordCloudbrainDuration, err := models.GetDurationRecordBeginTime() | |||||
| if err != nil { | |||||
| log.Error("Can not get GetDurationRecordBeginTime", err) | |||||
| ctx.Error(http.StatusBadRequest, ctx.Tr("repo.record_begintime_get_err")) | |||||
| return beginTime, endTime | |||||
| } | |||||
| brainRecordBeginTime := recordCloudbrainDuration[0].CreatedUnix.AsTime() | |||||
| beginTime = brainRecordBeginTime | |||||
| endTime = now | |||||
| } else if queryType == "today" { | |||||
| beginTime = now.AddDate(0, 0, 0) | |||||
| beginTime = time.Date(beginTime.Year(), beginTime.Month(), beginTime.Day(), 0, 0, 0, 0, now.Location()) | |||||
| endTime = now | |||||
| } else if queryType == "yesterday" { | |||||
| beginTime = now.AddDate(0, 0, -1) | |||||
| beginTime = time.Date(beginTime.Year(), beginTime.Month(), beginTime.Day(), 0, 0, 0, 0, now.Location()) | |||||
| endTime = time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, now.Location()) | |||||
| } else if queryType == "last_7day" { | |||||
| beginTime = now.AddDate(0, 0, -6) | |||||
| beginTime = time.Date(beginTime.Year(), beginTime.Month(), beginTime.Day(), 0, 0, 0, 0, now.Location()) | |||||
| endTime = now | |||||
| } else if queryType == "last_30day" { | |||||
| beginTime = now.AddDate(0, 0, -29) | |||||
| beginTime = time.Date(beginTime.Year(), beginTime.Month(), beginTime.Day(), 0, 0, 0, 0, now.Location()) | |||||
| endTime = now | |||||
| } else if queryType == "current_month" { | |||||
| endTime = now | |||||
| beginTime = time.Date(endTime.Year(), endTime.Month(), 1, 0, 0, 0, 0, now.Location()) | |||||
| } else if queryType == "current_year" { | |||||
| endTime = now | |||||
| beginTime = time.Date(endTime.Year(), 1, 1, 0, 0, 0, 0, now.Location()) | |||||
| } else if queryType == "last_month" { | |||||
| lastMonthTime := now.AddDate(0, -1, 0) | |||||
| beginTime = time.Date(lastMonthTime.Year(), lastMonthTime.Month(), 1, 0, 0, 0, 0, now.Location()) | |||||
| endTime = time.Date(now.Year(), now.Month(), 1, 0, 0, 0, 0, now.Location()) | |||||
| } | |||||
| } else { | |||||
| if beginTimeStr == "" || endTimeStr == "" { | |||||
| // If neither the query type nor the begin/end time is set, fall back to queryType=all | |||||
| recordCloudbrainDuration, err := models.GetDurationRecordBeginTime() | |||||
| if err != nil { | |||||
| log.Error("Can not get recordCloudbrain", err) | |||||
| ctx.Error(http.StatusBadRequest, ctx.Tr("repo.record_begintime_get_err")) | |||||
| return beginTime, endTime | |||||
| } | |||||
| brainRecordBeginTime := recordCloudbrainDuration[0].CreatedUnix.AsTime() | |||||
| beginTime = brainRecordBeginTime | |||||
| endTime = now | |||||
| } else { | |||||
| beginTime, err = time.ParseInLocation("2006-01-02", beginTimeStr, time.Local) | |||||
| if err != nil { | |||||
| log.Error("Can not ParseInLocation.", err) | |||||
| ctx.Error(http.StatusBadRequest, ctx.Tr("ParseInLocation_get_error")) | |||||
| return beginTime, endTime | |||||
| } | |||||
| endTime, err = time.ParseInLocation("2006-01-02", endTimeStr, time.Local) | |||||
| if err != nil { | |||||
| log.Error("Can not ParseInLocation.", err) | |||||
| ctx.Error(http.StatusBadRequest, ctx.Tr("ParseInLocation_get_error")) | |||||
| return beginTime, endTime | |||||
| } | |||||
| if endTime.After(time.Now()) { | |||||
| endTime = time.Now() | |||||
| } | |||||
| } | |||||
| } | |||||
| return beginTime, endTime | |||||
| } | |||||
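| // getAiCenterUsageDuration sums the total and used card duration of the statistics created within [beginTime, endTime) and returns them together with the resulting usage rate. | |||||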
| func getAiCenterUsageDuration(beginTime time.Time, endTime time.Time, cloudbrainStatistics []*models.CloudbrainDurationStatistic) (int, int, float64) { | |||||
| totalDuration := int(0) | |||||
| usageDuration := int(0) | |||||
| usageRate := float64(0) | |||||
| for _, cloudbrainStatistic := range cloudbrainStatistics { | |||||
| if int64(cloudbrainStatistic.CreatedUnix) >= beginTime.Unix() && int64(cloudbrainStatistic.CreatedUnix) < endTime.Unix() { | |||||
| totalDuration += cloudbrainStatistic.CardsTotalDuration | |||||
| usageDuration += cloudbrainStatistic.CardsUseDuration | |||||
| } | |||||
| } | |||||
| if totalDuration == 0 || usageDuration == 0 { | |||||
| usageRate = 0 | |||||
| } else { | |||||
| usageRate = float64(usageDuration) / float64(totalDuration) | |||||
| } | |||||
| return totalDuration, usageDuration, usageRate | |||||
| } | |||||
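| // getDurationStatistic aggregates card duration statistics per AI center for the OpenI and C2Net clusters and computes an overall usage rate for the OpenI cluster. | |||||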
| func getDurationStatistic(beginTime time.Time, endTime time.Time) (models.DurationRateStatistic, models.DurationRateStatistic, float64) { | |||||
| OpenITotalDuration := make(map[string]int) | |||||
| OpenIUsageDuration := make(map[string]int) | |||||
| OpenIUsageRate := make(map[string]float64) | |||||
| C2NetTotalDuration := make(map[string]int) | |||||
| C2NetUsageDuration := make(map[string]int) | |||||
| OpenIDurationRate := models.DurationRateStatistic{} | |||||
| C2NetDurationRate := models.DurationRateStatistic{} | |||||
| cardDurationStatistics, err := models.GetCardDurationStatistics(&models.DurationStatisticOptions{ | |||||
| BeginTime: beginTime, | |||||
| EndTime: endTime, | |||||
| }) | |||||
| if err != nil { | |||||
| log.Error("GetCardDurationStatistics error:", err) | |||||
| return OpenIDurationRate, C2NetDurationRate, 0 | |||||
| } | |||||
| for _, cloudbrainStatistic := range cardDurationStatistics { | |||||
| if cloudbrainStatistic.Cluster == models.OpenICluster { | |||||
| if _, ok := OpenITotalDuration[cloudbrainStatistic.AiCenterName]; !ok { | |||||
| OpenITotalDuration[cloudbrainStatistic.AiCenterName] = cloudbrainStatistic.CardsTotalDuration | |||||
| } else { | |||||
| OpenITotalDuration[cloudbrainStatistic.AiCenterName] += cloudbrainStatistic.CardsTotalDuration | |||||
| } | |||||
| if _, ok := OpenIUsageDuration[cloudbrainStatistic.AiCenterName]; !ok { | |||||
| OpenIUsageDuration[cloudbrainStatistic.AiCenterName] = cloudbrainStatistic.CardsUseDuration | |||||
| } else { | |||||
| OpenIUsageDuration[cloudbrainStatistic.AiCenterName] += cloudbrainStatistic.CardsUseDuration | |||||
| } | |||||
| } | |||||
| if cloudbrainStatistic.Cluster == models.C2NetCluster { | |||||
| if _, ok := C2NetTotalDuration[cloudbrainStatistic.AiCenterName]; !ok { | |||||
| C2NetTotalDuration[cloudbrainStatistic.AiCenterName] = cloudbrainStatistic.CardsTotalDuration | |||||
| } else { | |||||
| C2NetTotalDuration[cloudbrainStatistic.AiCenterName] += cloudbrainStatistic.CardsTotalDuration | |||||
| } | |||||
| if _, ok := C2NetUsageDuration[cloudbrainStatistic.AiCenterName]; !ok { | |||||
| C2NetUsageDuration[cloudbrainStatistic.AiCenterName] = cloudbrainStatistic.CardsUseDuration | |||||
| } else { | |||||
| C2NetUsageDuration[cloudbrainStatistic.AiCenterName] += cloudbrainStatistic.CardsUseDuration | |||||
| } | |||||
| } | |||||
| } | |||||
| ResourceAiCenterRes, err := models.GetResourceAiCenters() | |||||
| if err != nil { | |||||
| log.Error("Can not get ResourceAiCenterRes.", err) | |||||
| return OpenIDurationRate, C2NetDurationRate, 0 | |||||
| } | |||||
| for _, v := range ResourceAiCenterRes { | |||||
| if cutString(v.AiCenterCode, 4) == cutString(models.AICenterOfCloudBrainOne, 4) { | |||||
| if _, ok := OpenIUsageDuration[v.AiCenterName]; !ok { | |||||
| OpenIUsageDuration[v.AiCenterName] = 0 | |||||
| } | |||||
| if _, ok := OpenITotalDuration[v.AiCenterName]; !ok { | |||||
| OpenITotalDuration[v.AiCenterName] = 0 | |||||
| } | |||||
| } else { | |||||
| if _, ok := C2NetUsageDuration[v.AiCenterName]; !ok { | |||||
| C2NetUsageDuration[v.AiCenterName] = 0 | |||||
| } | |||||
| } | |||||
| } | |||||
| totalCanUse := float64(0) | |||||
| totalUse := float64(0) | |||||
| totalUsageRate := float64(0) | |||||
| for k, v := range OpenITotalDuration { | |||||
| for i, j := range OpenIUsageDuration { | |||||
| if k == i { | |||||
| if v == 0 || j == 0 { | |||||
| OpenIUsageRate[k] = 0 | |||||
| } else { | |||||
| OpenIUsageRate[k] = float64(j) / float64(v) | |||||
| } | |||||
| } | |||||
| } | |||||
| } | |||||
| for _, v := range OpenITotalDuration { | |||||
| totalCanUse += float64(v) | |||||
| } | |||||
| for _, v := range OpenIUsageDuration { | |||||
| totalUse += float64(v) | |||||
| } | |||||
| if totalCanUse == 0 || totalUse == 0 { | |||||
| totalUsageRate = 0 | |||||
| } else { | |||||
| totalUsageRate = totalUse / totalCanUse | |||||
| } | |||||
| OpenIDurationRate.AiCenterTotalDurationStat = OpenITotalDuration | |||||
| OpenIDurationRate.AiCenterUsageDurationStat = OpenIUsageDuration | |||||
| OpenIDurationRate.UsageRate = OpenIUsageRate | |||||
| C2NetDurationRate.AiCenterTotalDurationStat = C2NetTotalDuration | |||||
| C2NetDurationRate.AiCenterUsageDurationStat = C2NetUsageDuration | |||||
| return OpenIDurationRate, C2NetDurationRate, totalUsageRate | |||||
| } | |||||
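| // cutString returns at most the first lens bytes of str. | |||||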
| func cutString(str string, lens int) string { | |||||
| if len(str) < lens { | |||||
| return str | |||||
| } | |||||
| return str[:lens] | |||||
| } | |||||
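| // getDayCloudbrainDuration walks backwards from endTime to beginTime one day at a time and computes the card usage statistics of the given AI center for each day. | |||||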
| func getDayCloudbrainDuration(beginTime time.Time, endTime time.Time, aiCenterCode string) ([]models.DateUsageStatistic, int, error) { | |||||
| now := time.Now() | |||||
| endTimeTemp := time.Date(endTime.Year(), endTime.Month(), endTime.Day(), 0, 0, 0, 0, now.Location()) | |||||
| if endTimeTemp.Equal(endTime) { | |||||
| endTimeTemp = endTimeTemp.AddDate(0, 0, -1) | |||||
| } | |||||
| cardDurationStatistics, err := models.GetCardDurationStatistics(&models.DurationStatisticOptions{ | |||||
| BeginTime: beginTime, | |||||
| EndTime: endTime, | |||||
| AiCenterCode: aiCenterCode, | |||||
| }) | |||||
| if err != nil { | |||||
| log.Error("GetCardDurationStatistics error:", err) | |||||
| return nil, 0, err | |||||
| } | |||||
| dayCloudbrainInfo := make([]models.DateUsageStatistic, 0) | |||||
| count := 0 | |||||
| for beginTime.Before(endTimeTemp) || beginTime.Equal(endTimeTemp) { | |||||
| TotalDuration, UsageDuration, UsageRate := getAiCenterUsageDuration(endTimeTemp, endTime, cardDurationStatistics) | |||||
| dayCloudbrainInfo = append(dayCloudbrainInfo, models.DateUsageStatistic{ | |||||
| Date: endTimeTemp.Format("2006/01/02"), | |||||
| UsageDuration: UsageDuration, | |||||
| TotalDuration: TotalDuration, | |||||
| UsageRate: UsageRate, | |||||
| }) | |||||
| endTime = endTimeTemp | |||||
| endTimeTemp = endTimeTemp.AddDate(0, 0, -1) | |||||
| if endTimeTemp.Before(beginTime) && beginTime.Before(endTime) { | |||||
| endTimeTemp = beginTime | |||||
| } | |||||
| count += 1 | |||||
| } | |||||
| return dayCloudbrainInfo, count, nil | |||||
| } | |||||
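| // getHourCloudbrainDuration buckets the card duration statistics of the given AI center by hour of day (0-23) and computes the usage rate of each hour. | |||||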
| func getHourCloudbrainDuration(beginTime time.Time, endTime time.Time, aiCenterCode string) (models.HourTimeStatistic, error) { | |||||
| hourTimeTotalDuration := make(map[string]int) | |||||
| hourTimeUsageDuration := make(map[string]int) | |||||
| hourTimeUsageRate := make(map[string]float64) | |||||
| hourTimeStatistic := models.HourTimeStatistic{} | |||||
| cardDurationStatistics, err := models.GetCardDurationStatistics(&models.DurationStatisticOptions{ | |||||
| BeginTime: beginTime, | |||||
| EndTime: endTime, | |||||
| }) | |||||
| if err != nil { | |||||
| log.Error("GetCardDurationStatistics error:", err) | |||||
| return hourTimeStatistic, err | |||||
| } | |||||
| for _, cloudbrainStatistic := range cardDurationStatistics { | |||||
| if cloudbrainStatistic.AiCenterCode == aiCenterCode { | |||||
| if _, ok := hourTimeTotalDuration[strconv.Itoa(cloudbrainStatistic.HourTime)]; !ok { | |||||
| hourTimeTotalDuration[strconv.Itoa(cloudbrainStatistic.HourTime)] = cloudbrainStatistic.CardsTotalDuration | |||||
| } else { | |||||
| hourTimeTotalDuration[strconv.Itoa(cloudbrainStatistic.HourTime)] += cloudbrainStatistic.CardsTotalDuration | |||||
| } | |||||
| if _, ok := hourTimeUsageDuration[strconv.Itoa(cloudbrainStatistic.HourTime)]; !ok { | |||||
| hourTimeUsageDuration[strconv.Itoa(cloudbrainStatistic.HourTime)] = cloudbrainStatistic.CardsUseDuration | |||||
| } else { | |||||
| hourTimeUsageDuration[strconv.Itoa(cloudbrainStatistic.HourTime)] += cloudbrainStatistic.CardsUseDuration | |||||
| } | |||||
| } | |||||
| } | |||||
| hourTimeList := []string{"0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23"} | |||||
| for _, v := range hourTimeList { | |||||
| if _, ok := hourTimeUsageDuration[v]; !ok { | |||||
| hourTimeUsageDuration[v] = 0 | |||||
| } | |||||
| if _, ok := hourTimeTotalDuration[v]; !ok { | |||||
| hourTimeTotalDuration[v] = 0 | |||||
| } | |||||
| } | |||||
| for k, v := range hourTimeTotalDuration { | |||||
| for i, j := range hourTimeUsageDuration { | |||||
| if k == i { | |||||
| if v == 0 || j == 0 { | |||||
| hourTimeUsageRate[k] = 0 | |||||
| } else { | |||||
| hourTimeUsageRate[k] = float64(j) / float64(v) | |||||
| } | |||||
| } | |||||
| } | |||||
| } | |||||
| hourTimeStatistic.HourTimeTotalDuration = hourTimeTotalDuration | |||||
| hourTimeStatistic.HourTimeUsageDuration = hourTimeUsageDuration | |||||
| hourTimeStatistic.HourTimeUsageRate = hourTimeUsageRate | |||||
| return hourTimeStatistic, nil | |||||
| } | |||||
| @@ -6,6 +6,8 @@ package repo | |||||
| import ( | import ( | ||||
| "bytes" | "bytes" | ||||
| "code.gitea.io/gitea/modules/task" | |||||
| "code.gitea.io/gitea/routers/response" | |||||
| "errors" | "errors" | ||||
| "fmt" | "fmt" | ||||
| "net/http" | "net/http" | ||||
| @@ -216,3 +218,146 @@ func handleMigrateError(ctx *context.APIContext, repoOwner *models.User, remoteA | |||||
| } | } | ||||
| } | } | ||||
| } | } | ||||
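| // MigrateSubmit validates the migration request, builds the migrate options and enqueues an asynchronous repository migration task, returning the OpenI and origin URLs on success. | |||||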
| func MigrateSubmit(ctx *context.APIContext, form auth.MigrateRepoForm) { | |||||
| log.Info("receive MigrateSubmit request") | |||||
| ctxUser, bizErr := checkContextUser(ctx, form.UID) | |||||
| if bizErr != nil { | |||||
| ctx.JSON(http.StatusOK, response.ResponseError(bizErr)) | |||||
| return | |||||
| } | |||||
| remoteAddr, err := form.ParseRemoteAddr(ctx.User) | |||||
| if err != nil { | |||||
| if models.IsErrInvalidCloneAddr(err) { | |||||
| addrErr := err.(models.ErrInvalidCloneAddr) | |||||
| switch { | |||||
| case addrErr.IsURLError: | |||||
| ctx.JSON(http.StatusOK, response.PARAM_ERROR) | |||||
| case addrErr.IsPermissionDenied: | |||||
| ctx.JSON(http.StatusOK, response.INSUFFICIENT_PERMISSION) | |||||
| case addrErr.IsInvalidPath: | |||||
| ctx.JSON(http.StatusOK, response.PARAM_ERROR) | |||||
| default: | |||||
| ctx.JSON(http.StatusOK, response.SYSTEM_ERROR) | |||||
| } | |||||
| } else { | |||||
| ctx.JSON(http.StatusOK, response.SYSTEM_ERROR) | |||||
| } | |||||
| return | |||||
| } | |||||
| var gitServiceType = api.PlainGitService | |||||
| u, err := url.Parse(form.CloneAddr) | |||||
| if err == nil && strings.EqualFold(u.Host, "github.com") { | |||||
| gitServiceType = api.GithubService | |||||
| } | |||||
| var opts = migrations.MigrateOptions{ | |||||
| OriginalURL: form.CloneAddr, | |||||
| GitServiceType: gitServiceType, | |||||
| CloneAddr: remoteAddr, | |||||
| RepoName: form.RepoName, | |||||
| Alias: form.Alias, | |||||
| Description: form.Description, | |||||
| Private: form.Private || setting.Repository.ForcePrivate, | |||||
| Mirror: form.Mirror, | |||||
| AuthUsername: form.AuthUsername, | |||||
| AuthPassword: form.AuthPassword, | |||||
| Wiki: form.Wiki, | |||||
| Issues: form.Issues, | |||||
| Milestones: form.Milestones, | |||||
| Labels: form.Labels, | |||||
| Comments: true, | |||||
| PullRequests: form.PullRequests, | |||||
| Releases: form.Releases, | |||||
| } | |||||
| if opts.Mirror { | |||||
| opts.Issues = false | |||||
| opts.Milestones = false | |||||
| opts.Labels = false | |||||
| opts.Comments = false | |||||
| opts.PullRequests = false | |||||
| opts.Releases = false | |||||
| } | |||||
| err = models.CheckCreateRepository(ctx.User, ctxUser, opts.RepoName, opts.Alias) | |||||
| if err != nil { | |||||
| handleMigrateError4Api(ctx, ctxUser, remoteAddr, err) | |||||
| return | |||||
| } | |||||
| err = task.MigrateRepository(ctx.User, ctxUser, opts) | |||||
| if err == nil { | |||||
| r := make(map[string]string) | |||||
| r["OpenIUrl"] = strings.TrimSuffix(setting.AppURL, "/") + "/" + ctxUser.Name + "/" + opts.RepoName | |||||
| r["OriginUrl"] = form.CloneAddr | |||||
| ctx.JSON(http.StatusOK, response.SuccessWithData(r)) | |||||
| return | |||||
| } | |||||
| handleMigrateError4Api(ctx, ctxUser, remoteAddr, err) | |||||
| } | |||||
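| // checkContextUser resolves the owner of the migrated repository: the current user, or an organization the user is allowed to create repositories in. | |||||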
| func checkContextUser(ctx *context.APIContext, uid int64) (*models.User, *response.BizError) { | |||||
| if uid == ctx.User.ID || uid == 0 { | |||||
| return ctx.User, nil | |||||
| } | |||||
| org, err := models.GetUserByID(uid) | |||||
| if models.IsErrUserNotExist(err) { | |||||
| return ctx.User, nil | |||||
| } | |||||
| if err != nil { | |||||
| return nil, response.SYSTEM_ERROR | |||||
| } | |||||
| // Check ownership of organization. | |||||
| if !org.IsOrganization() { | |||||
| // Migrating into another individual user's namespace is not allowed. | |||||
| return nil, response.INSUFFICIENT_PERMISSION | |||||
| } | |||||
| if !ctx.User.IsAdmin { | |||||
| canCreate, err := org.CanCreateOrgRepo(ctx.User.ID) | |||||
| if err != nil { | |||||
| return nil, response.NewBizError(err) | |||||
| } else if !canCreate { | |||||
| return nil, response.INSUFFICIENT_PERMISSION | |||||
| } | |||||
| } | |||||
| return org, nil | |||||
| } | |||||
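| // handleMigrateError4Api maps repository migration errors to API error responses, sanitizing the remote address in the returned message. | |||||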
| func handleMigrateError4Api(ctx *context.APIContext, repoOwner *models.User, remoteAddr string, err error) { | |||||
| switch { | |||||
| case models.IsErrRepoAlreadyExist(err): | |||||
| ctx.JSON(http.StatusOK, response.Error(3, "The repository with the same name already exists.")) | |||||
| case migrations.IsRateLimitError(err): | |||||
| ctx.JSON(http.StatusOK, response.ServerError("Remote visit addressed rate limitation.")) | |||||
| case migrations.IsTwoFactorAuthError(err): | |||||
| ctx.JSON(http.StatusOK, response.ServerError("Remote visit required two factors authentication.")) | |||||
| case models.IsErrReachLimitOfRepo(err): | |||||
| ctx.JSON(http.StatusOK, response.ServerError(fmt.Sprintf("You have already reached your limit of %d repositories.", repoOwner.MaxCreationLimit()))) | |||||
| case models.IsErrNameReserved(err): | |||||
| ctx.JSON(http.StatusOK, response.ServerError(fmt.Sprintf("The username '%s' is reserved.", err.(models.ErrNameReserved).Name))) | |||||
| case models.IsErrNameCharsNotAllowed(err): | |||||
| ctx.JSON(http.StatusOK, response.ServerError(fmt.Sprintf("The username '%s' contains invalid characters.", err.(models.ErrNameCharsNotAllowed).Name))) | |||||
| case models.IsErrNamePatternNotAllowed(err): | |||||
| ctx.JSON(http.StatusOK, response.ServerError(fmt.Sprintf("The pattern '%s' is not allowed in a username.", err.(models.ErrNamePatternNotAllowed).Pattern))) | |||||
| default: | |||||
| err = util.URLSanitizedError(err, remoteAddr) | |||||
| if strings.Contains(err.Error(), "Authentication failed") || | |||||
| strings.Contains(err.Error(), "Bad credentials") || | |||||
| strings.Contains(err.Error(), "could not read Username") { | |||||
| ctx.JSON(http.StatusOK, response.ServerError(fmt.Sprintf("Authentication failed: %v.", err))) | |||||
| } else if strings.Contains(err.Error(), "fatal:") { | |||||
| ctx.JSON(http.StatusOK, response.ServerError(fmt.Sprintf("Migration failed: %v.", err))) | |||||
| } else { | |||||
| ctx.JSON(http.StatusOK, response.ServerError(err.Error())) | |||||
| } | |||||
| } | |||||
| } | |||||
| func QueryRepoSatus(ctx *context.APIContext, form auth.MigrateRepoForm) { | |||||
| } | |||||
| @@ -12,6 +12,8 @@ import ( | |||||
| "strconv" | "strconv" | ||||
| "strings" | "strings" | ||||
| "code.gitea.io/gitea/modules/urfs_client/urchin" | |||||
| "code.gitea.io/gitea/modules/notification" | "code.gitea.io/gitea/modules/notification" | ||||
| "code.gitea.io/gitea/modules/grampus" | "code.gitea.io/gitea/modules/grampus" | ||||
| @@ -25,6 +27,7 @@ import ( | |||||
| "code.gitea.io/gitea/modules/storage" | "code.gitea.io/gitea/modules/storage" | ||||
| "code.gitea.io/gitea/modules/timeutil" | "code.gitea.io/gitea/modules/timeutil" | ||||
| routerRepo "code.gitea.io/gitea/routers/repo" | routerRepo "code.gitea.io/gitea/routers/repo" | ||||
| cloudbrainService "code.gitea.io/gitea/services/cloudbrain" | |||||
| ) | ) | ||||
| func GetModelArtsNotebook2(ctx *context.APIContext) { | func GetModelArtsNotebook2(ctx *context.APIContext) { | ||||
| @@ -49,6 +52,7 @@ func GetModelArtsNotebook2(ctx *context.APIContext) { | |||||
| "JobName": job.JobName, | "JobName": job.JobName, | ||||
| "JobStatus": job.Status, | "JobStatus": job.Status, | ||||
| "JobDuration": job.TrainJobDuration, | "JobDuration": job.TrainJobDuration, | ||||
| "StartTime": job.StartTime, | |||||
| }) | }) | ||||
| } | } | ||||
| @@ -169,17 +173,20 @@ func GetModelArtsTrainJobVersion(ctx *context.APIContext) { | |||||
| if len(result.JobInfo.Tasks) > 0 { | if len(result.JobInfo.Tasks) > 0 { | ||||
| if len(result.JobInfo.Tasks[0].CenterID) > 0 && len(result.JobInfo.Tasks[0].CenterName) > 0 { | if len(result.JobInfo.Tasks[0].CenterID) > 0 && len(result.JobInfo.Tasks[0].CenterName) > 0 { | ||||
| job.AiCenter = result.JobInfo.Tasks[0].CenterID[0] + "+" + result.JobInfo.Tasks[0].CenterName[0] | job.AiCenter = result.JobInfo.Tasks[0].CenterID[0] + "+" + result.JobInfo.Tasks[0].CenterName[0] | ||||
| aiCenterName = result.JobInfo.Tasks[0].CenterName[0] | |||||
| // aiCenterName = result.JobInfo.Tasks[0].CenterName[0] | |||||
| aiCenterName = cloudbrainService.GetAiCenterShow(job.AiCenter, ctx.Context) | |||||
| } | } | ||||
| } | } | ||||
| } else { | } else { | ||||
| temp := strings.Split(job.AiCenter, "+") | |||||
| if len(temp) > 1 { | |||||
| aiCenterName = temp[1] | |||||
| } | |||||
| aiCenterName = cloudbrainService.GetAiCenterShow(job.AiCenter, ctx.Context) | |||||
| } | } | ||||
| if oldStatus != job.Status { | if oldStatus != job.Status { | ||||
| notification.NotifyChangeCloudbrainStatus(job, oldStatus) | notification.NotifyChangeCloudbrainStatus(job, oldStatus) | ||||
| if models.IsTrainJobTerminal(job.Status) { | |||||
| if len(result.JobInfo.Tasks[0].CenterID) == 1 { | |||||
| urchin.GetBackNpuModel(job.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(job.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID)) | |||||
| } | |||||
| } | |||||
| } | } | ||||
| err = models.UpdateTrainJobVersion(job) | err = models.UpdateTrainJobVersion(job) | ||||
| if err != nil { | if err != nil { | ||||
| @@ -192,6 +199,7 @@ func GetModelArtsTrainJobVersion(ctx *context.APIContext) { | |||||
| "JobStatus": job.Status, | "JobStatus": job.Status, | ||||
| "JobDuration": job.TrainJobDuration, | "JobDuration": job.TrainJobDuration, | ||||
| "AiCenter": aiCenterName, | "AiCenter": aiCenterName, | ||||
| "StartTime": job.StartTime, | |||||
| }) | }) | ||||
| } | } | ||||
| @@ -319,6 +327,7 @@ func TrainJobGetLog(ctx *context.APIContext) { | |||||
| "Content": result.Content, | "Content": result.Content, | ||||
| "Lines": result.Lines, | "Lines": result.Lines, | ||||
| "CanLogDownload": canLogDownload, | "CanLogDownload": canLogDownload, | ||||
| "StartTime": task.StartTime, | |||||
| }) | }) | ||||
| } | } | ||||
| @@ -458,6 +467,7 @@ func ModelList(ctx *context.APIContext) { | |||||
| return | return | ||||
| } | } | ||||
| status := models.StorageScheduleSucceed | |||||
| var fileInfos []storage.FileInfo | var fileInfos []storage.FileInfo | ||||
| if task.ComputeResource == models.NPUResource { | if task.ComputeResource == models.NPUResource { | ||||
| fileInfos, err = storage.GetObsListObject(task.JobName, "output/", parentDir, versionName) | fileInfos, err = storage.GetObsListObject(task.JobName, "output/", parentDir, versionName) | ||||
| @@ -466,6 +476,30 @@ func ModelList(ctx *context.APIContext) { | |||||
| ctx.ServerError("GetObsListObject:", err) | ctx.ServerError("GetObsListObject:", err) | ||||
| return | return | ||||
| } | } | ||||
| if task.Type == models.TypeC2Net { | |||||
| if len(fileInfos) > 0 { | |||||
| status = models.StorageScheduleSucceed | |||||
| } else { | |||||
| if models.IsTrainJobTerminal(task.Status) { | |||||
| if task.Status == models.GrampusStatusStopped { | |||||
| status = models.StorageNoFile | |||||
| } else { | |||||
| record, _ := models.GetScheduleRecordByCloudbrainID(task.ID) | |||||
| if record != nil { | |||||
| status = record.Status | |||||
| if status == models.StorageScheduleSucceed { | |||||
| status = models.StorageNoFile | |||||
| } | |||||
| } else { | |||||
| status = models.StorageScheduleProcessing | |||||
| } | |||||
| } | |||||
| } else { | |||||
| status = models.StorageScheduleWaiting | |||||
| } | |||||
| } | |||||
| } | |||||
| } else if task.ComputeResource == models.GPUResource { | } else if task.ComputeResource == models.GPUResource { | ||||
| files, err := routerRepo.GetModelDirs(task.JobName, parentDir) | files, err := routerRepo.GetModelDirs(task.JobName, parentDir) | ||||
| if err != nil { | if err != nil { | ||||
| @@ -485,7 +519,7 @@ func ModelList(ctx *context.APIContext) { | |||||
| ctx.JSON(http.StatusOK, map[string]interface{}{ | ctx.JSON(http.StatusOK, map[string]interface{}{ | ||||
| "JobID": jobID, | "JobID": jobID, | ||||
| "VersionName": versionName, | "VersionName": versionName, | ||||
| "StatusOK": 0, | |||||
| "StatusOK": status, | |||||
| "Path": dirArray, | "Path": dirArray, | ||||
| "Dirs": fileInfos, | "Dirs": fileInfos, | ||||
| "task": task, | "task": task, | ||||
| @@ -514,6 +548,7 @@ func GetModelArtsInferenceJob(ctx *context.APIContext) { | |||||
| "JobID": jobID, | "JobID": jobID, | ||||
| "JobStatus": job.Status, | "JobStatus": job.Status, | ||||
| "JobDuration": job.TrainJobDuration, | "JobDuration": job.TrainJobDuration, | ||||
| "StartTime": job.StartTime, | |||||
| }) | }) | ||||
| } | } | ||||
| @@ -29,7 +29,9 @@ const ( | |||||
| tplModelConvertInfo = "repo/modelmanage/convertshowinfo" | tplModelConvertInfo = "repo/modelmanage/convertshowinfo" | ||||
| PYTORCH_ENGINE = 0 | PYTORCH_ENGINE = 0 | ||||
| TENSORFLOW_ENGINE = 1 | TENSORFLOW_ENGINE = 1 | ||||
| MINDSPORE_ENGIN = 2 | |||||
| MINDSPORE_ENGINE = 2 | |||||
| PADDLE_ENGINE = 4 | |||||
| MXNET_ENGINE = 6 | |||||
| ModelMountPath = "/model" | ModelMountPath = "/model" | ||||
| CodeMountPath = "/code" | CodeMountPath = "/code" | ||||
| DataSetMountPath = "/dataset" | DataSetMountPath = "/dataset" | ||||
| @@ -395,6 +397,20 @@ func createGpuTrainJob(modelConvert *models.AiModelConvert, ctx *context.Context | |||||
| deleteLocalDir(relatetiveModelPath) | deleteLocalDir(relatetiveModelPath) | ||||
| dataActualPath = setting.Attachment.Minio.RealPath + setting.Attachment.Minio.Bucket + "/" + setting.CBCodePathPrefix + modelConvert.ID + "/dataset" | dataActualPath = setting.Attachment.Minio.RealPath + setting.Attachment.Minio.Bucket + "/" + setting.CBCodePathPrefix + modelConvert.ID + "/dataset" | ||||
| } | } | ||||
| } else if modelConvert.SrcEngine == PADDLE_ENGINE { | |||||
| IMAGE_URL = setting.ModelConvert.GPU_PADDLE_IMAGE | |||||
| if modelConvert.DestFormat == CONVERT_FORMAT_ONNX { | |||||
| command = getGpuModelConvertCommand(modelConvert.ID, modelConvert.ModelPath, modelConvert, setting.ModelConvert.PaddleOnnxBootFile) | |||||
| } else { | |||||
| return errors.New("Not support the format.") | |||||
| } | |||||
| } else if modelConvert.SrcEngine == MXNET_ENGINE { | |||||
| IMAGE_URL = setting.ModelConvert.GPU_MXNET_IMAGE | |||||
| if modelConvert.DestFormat == CONVERT_FORMAT_ONNX { | |||||
| command = getGpuModelConvertCommand(modelConvert.ID, modelConvert.ModelPath, modelConvert, setting.ModelConvert.MXnetOnnxBootFile) | |||||
| } else { | |||||
| return errors.New("Not support the format.") | |||||
| } | |||||
| } | } | ||||
| log.Info("dataActualPath=" + dataActualPath) | log.Info("dataActualPath=" + dataActualPath) | ||||
| @@ -17,6 +17,7 @@ import ( | |||||
| "code.gitea.io/gitea/modules/notification" | "code.gitea.io/gitea/modules/notification" | ||||
| "code.gitea.io/gitea/modules/setting" | "code.gitea.io/gitea/modules/setting" | ||||
| "code.gitea.io/gitea/modules/storage" | "code.gitea.io/gitea/modules/storage" | ||||
| "code.gitea.io/gitea/services/cloudbrain/resource" | |||||
| uuid "github.com/satori/go.uuid" | uuid "github.com/satori/go.uuid" | ||||
| ) | ) | ||||
| @@ -69,13 +70,10 @@ func saveModelByParameters(jobId string, versionName string, name string, versio | |||||
| cloudType = models.TypeCloudBrainTwo | cloudType = models.TypeCloudBrainTwo | ||||
| } else if aiTask.ComputeResource == models.GPUResource { | } else if aiTask.ComputeResource == models.GPUResource { | ||||
| cloudType = models.TypeCloudBrainOne | cloudType = models.TypeCloudBrainOne | ||||
| var ResourceSpecs *models.ResourceSpecs | |||||
| json.Unmarshal([]byte(setting.ResourceSpecs), &ResourceSpecs) | |||||
| for _, tmp := range ResourceSpecs.ResourceSpec { | |||||
| if tmp.Id == aiTask.ResourceSpecId { | |||||
| flaverName := ctx.Tr("cloudbrain.gpu_num") + ": " + fmt.Sprint(tmp.GpuNum) + " " + ctx.Tr("cloudbrain.cpu_num") + ": " + fmt.Sprint(tmp.CpuNum) + " " + ctx.Tr("cloudbrain.memory") + "(MB): " + fmt.Sprint(tmp.MemMiB) + " " + ctx.Tr("cloudbrain.shared_memory") + "(MB): " + fmt.Sprint(tmp.ShareMemMiB) | |||||
| aiTask.FlavorName = flaverName | |||||
| } | |||||
| spec, err := resource.GetCloudbrainSpec(aiTask.ID) | |||||
| if err == nil { | |||||
| flaverName := "GPU: " + fmt.Sprint(spec.AccCardsNum) + "*" + spec.AccCardType + ",CPU: " + fmt.Sprint(spec.CpuCores) + "," + ctx.Tr("cloudbrain.memory") + ": " + fmt.Sprint(spec.MemGiB) + "GB," + ctx.Tr("cloudbrain.shared_memory") + ": " + fmt.Sprint(spec.ShareMemGiB) + "GB" | |||||
| aiTask.FlavorName = flaverName | |||||
| } | } | ||||
| } | } | ||||
| @@ -291,6 +291,7 @@ func queryTaskStatusFromCloudbrain(job *models.Cloudbrain) { | |||||
| } else { | } else { | ||||
| // | // | ||||
| job.Status = string(models.ModelSafetyTesting) | job.Status = string(models.ModelSafetyTesting) | ||||
| job.EndTime = 0 | |||||
| err = models.UpdateJob(job) | err = models.UpdateJob(job) | ||||
| if err != nil { | if err != nil { | ||||
| log.Error("UpdateJob failed:", err) | log.Error("UpdateJob failed:", err) | ||||
| @@ -307,6 +308,9 @@ func queryTaskStatusFromModelSafetyTestServer(job *models.Cloudbrain) { | |||||
| if result.Data.Status == 1 { | if result.Data.Status == 1 { | ||||
| log.Info("The task is running....") | log.Info("The task is running....") | ||||
| } else { | } else { | ||||
| job.EndTime = timeutil.TimeStampNow() | |||||
| job.Duration = job.EndTime.AsTime().Unix() - job.StartTime.AsTime().Unix() | |||||
| job.TrainJobDuration = models.ConvertDurationToStr(job.Duration) | |||||
| if result.Data.Code == 0 { | if result.Data.Code == 0 { | ||||
| job.ResultJson = result.Data.StandardJson | job.ResultJson = result.Data.StandardJson | ||||
| job.Status = string(models.JobSucceeded) | job.Status = string(models.JobSucceeded) | ||||
| @@ -440,6 +444,9 @@ func updateJobFailed(job *models.Cloudbrain, msg string) { | |||||
| //update task failed. | //update task failed. | ||||
| job.Status = string(models.ModelArtsTrainJobFailed) | job.Status = string(models.ModelArtsTrainJobFailed) | ||||
| job.ResultJson = msg | job.ResultJson = msg | ||||
| job.EndTime = timeutil.TimeStampNow() | |||||
| job.Duration = job.EndTime.AsTime().Unix() - job.StartTime.AsTime().Unix() | |||||
| job.TrainJobDuration = models.ConvertDurationToStr(job.Duration) | |||||
| err := models.UpdateJob(job) | err := models.UpdateJob(job) | ||||
| if err != nil { | if err != nil { | ||||
| log.Error("UpdateJob failed:", err) | log.Error("UpdateJob failed:", err) | ||||
| @@ -2,6 +2,7 @@ package repo | |||||
| import ( | import ( | ||||
| "bufio" | "bufio" | ||||
| "code.gitea.io/gitea/modules/urfs_client/urchin" | |||||
| "encoding/json" | "encoding/json" | ||||
| "errors" | "errors" | ||||
| "fmt" | "fmt" | ||||
| @@ -752,47 +753,48 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo | |||||
| ctx.NotFound(ctx.Req.URL.RequestURI(), nil) | ctx.NotFound(ctx.Req.URL.RequestURI(), nil) | ||||
| return | return | ||||
| } | } | ||||
| result, err := cloudbrain.GetJob(task.JobID) | |||||
| if err != nil { | |||||
| log.Info("error:" + err.Error()) | |||||
| ctx.NotFound(ctx.Req.URL.RequestURI(), nil) | |||||
| return | |||||
| } | |||||
| prepareSpec4Show(ctx, task) | prepareSpec4Show(ctx, task) | ||||
| if ctx.Written() { | if ctx.Written() { | ||||
| return | return | ||||
| } | } | ||||
| if task.Status == string(models.JobWaiting) || task.Status == string(models.JobRunning) { | |||||
| result, err := cloudbrain.GetJob(task.JobID) | |||||
| if err != nil { | |||||
| log.Info("error:" + err.Error()) | |||||
| ctx.NotFound(ctx.Req.URL.RequestURI(), nil) | |||||
| return | |||||
| } | |||||
| if result != nil { | |||||
| jobRes, _ := models.ConvertToJobResultPayload(result.Payload) | |||||
| taskRoles := jobRes.TaskRoles | |||||
| taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{})) | |||||
| ctx.Data["taskRes"] = taskRes | |||||
| ctx.Data["ExitDiagnostics"] = taskRes.TaskStatuses[0].ExitDiagnostics | |||||
| oldStatus := task.Status | |||||
| task.Status = taskRes.TaskStatuses[0].State | |||||
| task.ContainerIp = "" | |||||
| task.ContainerID = taskRes.TaskStatuses[0].ContainerID | |||||
| models.ParseAndSetDurationFromCloudBrainOne(jobRes, task) | |||||
| if task.DeletedAt.IsZero() { //normal record | |||||
| if oldStatus != task.Status { | |||||
| notification.NotifyChangeCloudbrainStatus(task, oldStatus) | |||||
| } | |||||
| err = models.UpdateJob(task) | |||||
| if err != nil { | |||||
| ctx.Data["error"] = err.Error() | |||||
| return | |||||
| } | |||||
| } else { //deleted record | |||||
| if result != nil { | |||||
| jobRes, _ := models.ConvertToJobResultPayload(result.Payload) | |||||
| taskRoles := jobRes.TaskRoles | |||||
| taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{})) | |||||
| ctx.Data["taskRes"] = taskRes | |||||
| ctx.Data["ExitDiagnostics"] = taskRes.TaskStatuses[0].ExitDiagnostics | |||||
| oldStatus := task.Status | |||||
| task.Status = taskRes.TaskStatuses[0].State | |||||
| task.ContainerIp = "" | |||||
| task.ContainerID = taskRes.TaskStatuses[0].ContainerID | |||||
| models.ParseAndSetDurationFromCloudBrainOne(jobRes, task) | |||||
| if task.DeletedAt.IsZero() { //normal record | |||||
| if oldStatus != task.Status { | |||||
| notification.NotifyChangeCloudbrainStatus(task, oldStatus) | |||||
| } | |||||
| err = models.UpdateJob(task) | |||||
| if err != nil { | |||||
| ctx.Data["error"] = err.Error() | |||||
| return | |||||
| } | } | ||||
| } else { //deleted record | |||||
| ctx.Data["result"] = jobRes | |||||
| } else { | |||||
| log.Info("error:" + err.Error()) | |||||
| return | |||||
| } | } | ||||
| ctx.Data["result"] = jobRes | |||||
| } else { | |||||
| log.Info("error:" + err.Error()) | |||||
| return | |||||
| } | } | ||||
| user, err := models.GetUserByID(task.UserID) | user, err := models.GetUserByID(task.UserID) | ||||
| @@ -1942,6 +1944,11 @@ func SyncCloudbrainStatus() { | |||||
| task.CorrectCreateUnix() | task.CorrectCreateUnix() | ||||
| if oldStatus != task.Status { | if oldStatus != task.Status { | ||||
| notification.NotifyChangeCloudbrainStatus(task, oldStatus) | notification.NotifyChangeCloudbrainStatus(task, oldStatus) | ||||
| if models.IsTrainJobTerminal(task.Status) { | |||||
| if len(result.JobInfo.Tasks) > 0 && len(result.JobInfo.Tasks[0].CenterID) == 1 { | |||||
| urchin.GetBackNpuModel(task.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(task.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID)) | |||||
| } | |||||
| } | |||||
| } | } | ||||
| err = models.UpdateJob(task) | err = models.UpdateJob(task) | ||||
| if err != nil { | if err != nil { | ||||
| @@ -0,0 +1,169 @@ | |||||
| package repo | |||||
| import ( | |||||
| "strings" | |||||
| "time" | |||||
| "code.gitea.io/gitea/models" | |||||
| "code.gitea.io/gitea/modules/log" | |||||
| "code.gitea.io/gitea/modules/timeutil" | |||||
| ) | |||||
| func CloudbrainDurationStatisticHour() { | |||||
| dateTime := time.Now().Format("2006-01-02 15:04:05") | |||||
| dayTime := time.Now().Format("2006-01-02") | |||||
| now := time.Now() | |||||
| currentTime := time.Date(now.Year(), now.Month(), now.Day(), now.Hour(), 0, 0, 0, now.Location()) | |||||
| m, _ := time.ParseDuration("-1h") | |||||
| beginTime := currentTime.Add(m).Unix() | |||||
| endTime := currentTime.Unix() | |||||
| hourTime := currentTime.Add(m).Hour() | |||||
| ciTasks, err := models.GetCloudbrainByTime(beginTime, endTime) | |||||
| if err != nil { | |||||
| log.Info("GetCloudbrainByTime err: %v", err) | |||||
| return | |||||
| } | |||||
| specMap := make(map[string]*models.Specification) | |||||
| models.LoadSpecs4CloudbrainInfo(ciTasks) | |||||
| for _, cloudbrain := range ciTasks { | |||||
| if cloudbrain.Cloudbrain.Spec != nil { | |||||
| if _, ok := specMap[cloudbrain.Cloudbrain.Spec.AiCenterCode+"/"+cloudbrain.Cloudbrain.Spec.AccCardType]; !ok { | |||||
| specMap[cloudbrain.Cloudbrain.Spec.AiCenterCode+"/"+cloudbrain.Cloudbrain.Spec.AccCardType] = cloudbrain.Cloudbrain.Spec | |||||
| } | |||||
| } | |||||
| } | |||||
| cloudBrainCenterCodeAndCardTypeInfo := getcloudBrainCenterCodeAndCardTypeInfo(ciTasks, beginTime, endTime) | |||||
| resourceQueues, err := models.GetCanUseCardInfo() | |||||
| if err != nil { | |||||
| log.Info("GetCanUseCardInfo err: %v", err) | |||||
| return | |||||
| } | |||||
| cardsTotalDurationMap := make(map[string]int) | |||||
| for _, resourceQueue := range resourceQueues { | |||||
| cardsTotalDurationMap[resourceQueue.Cluster+"/"+resourceQueue.AiCenterName+"/"+resourceQueue.AiCenterCode+"/"+resourceQueue.AccCardType+"/"+resourceQueue.ComputeResource] = resourceQueue.CardsTotalNum * 1 * 60 * 60 | |||||
| } | |||||
| for centerCode, CardTypeInfo := range cloudBrainCenterCodeAndCardTypeInfo { | |||||
| for cardType, cardDuration := range CardTypeInfo { | |||||
| spec := specMap[centerCode+"/"+cardType] | |||||
| if spec != nil { | |||||
| if err := models.DeleteCloudbrainDurationStatisticHour(dayTime, hourTime, centerCode, cardType); err != nil { | |||||
| log.Error("DeleteCloudbrainDurationStatisticHour failed: %v", err.Error()) | |||||
| return | |||||
| } | |||||
| if _, ok := cardsTotalDurationMap[spec.Cluster+"/"+spec.AiCenterName+"/"+centerCode+"/"+cardType+"/"+spec.ComputeResource]; !ok { | |||||
| cardsTotalDurationMap[spec.Cluster+"/"+spec.AiCenterName+"/"+centerCode+"/"+cardType+"/"+spec.ComputeResource] = 0 | |||||
| } | |||||
| cloudbrainDurationStat := models.CloudbrainDurationStatistic{ | |||||
| DateTime: dateTime, | |||||
| DayTime: dayTime, | |||||
| HourTime: hourTime, | |||||
| Cluster: spec.Cluster, | |||||
| AiCenterName: spec.AiCenterName, | |||||
| AiCenterCode: centerCode, | |||||
| AccCardType: cardType, | |||||
| ComputeResource: spec.ComputeResource, | |||||
| CardsUseDuration: cardDuration, | |||||
| CardsTotalDuration: cardsTotalDurationMap[spec.Cluster+"/"+spec.AiCenterName+"/"+centerCode+"/"+cardType+"/"+spec.ComputeResource], | |||||
| CreatedUnix: timeutil.TimeStampNow(), | |||||
| } | |||||
| if _, err = models.InsertCloudbrainDurationStatistic(&cloudbrainDurationStat); err != nil { | |||||
| log.Error("Insert cloudbrainDurationStat failed: %v", err.Error()) | |||||
| } | |||||
| delete(cardsTotalDurationMap, spec.Cluster+"/"+spec.AiCenterName+"/"+centerCode+"/"+cardType+"/"+spec.ComputeResource) | |||||
| } | |||||
| } | |||||
| } | |||||
| for key, cardsTotalDuration := range cardsTotalDurationMap { | |||||
| if err := models.DeleteCloudbrainDurationStatisticHour(dayTime, hourTime, strings.Split(key, "/")[2], strings.Split(key, "/")[3]); err != nil { | |||||
| log.Error("DeleteCloudbrainDurationStatisticHour failed: %v", err.Error()) | |||||
| return | |||||
| } | |||||
| cloudbrainDurationStat := models.CloudbrainDurationStatistic{ | |||||
| DateTime: dateTime, | |||||
| DayTime: dayTime, | |||||
| HourTime: hourTime, | |||||
| Cluster: strings.Split(key, "/")[0], | |||||
| AiCenterName: strings.Split(key, "/")[1], | |||||
| AiCenterCode: strings.Split(key, "/")[2], | |||||
| AccCardType: strings.Split(key, "/")[3], | |||||
| ComputeResource: strings.Split(key, "/")[4], | |||||
| CardsUseDuration: 0, | |||||
| CardsTotalDuration: cardsTotalDuration, | |||||
| CreatedUnix: timeutil.TimeStampNow(), | |||||
| } | |||||
| if _, err = models.InsertCloudbrainDurationStatistic(&cloudbrainDurationStat); err != nil { | |||||
| log.Error("Insert cloudbrainDurationStat failed: %v", err.Error()) | |||||
| } | |||||
| } | |||||
| log.Info("finish summary cloudbrainDurationStat") | |||||
| } | |||||
| func getcloudBrainCenterCodeAndCardTypeInfo(ciTasks []*models.CloudbrainInfo, beginTime int64, endTime int64) map[string]map[string]int { | |||||
| var WorkServerNumber int | |||||
| var AccCardsNum int | |||||
| cloudBrainCenterCodeAndCardType := make(map[string]map[string]int) | |||||
| for _, cloudbrain := range ciTasks { | |||||
| if cloudbrain.Cloudbrain.StartTime == 0 { | |||||
| cloudbrain.Cloudbrain.StartTime = cloudbrain.Cloudbrain.CreatedUnix | |||||
| } | |||||
| if cloudbrain.Cloudbrain.EndTime == 0 { | |||||
| cloudbrain.Cloudbrain.EndTime = cloudbrain.Cloudbrain.UpdatedUnix | |||||
| } | |||||
| if cloudbrain.Cloudbrain.WorkServerNumber >= 1 { | |||||
| WorkServerNumber = cloudbrain.Cloudbrain.WorkServerNumber | |||||
| } else { | |||||
| WorkServerNumber = 1 | |||||
| } | |||||
| if cloudbrain.Cloudbrain.Spec == nil { | |||||
| continue | |||||
| } | |||||
| AccCardsNum = cloudbrain.Cloudbrain.Spec.AccCardsNum | |||||
| if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode]; !ok { | |||||
| cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode] = make(map[string]int) | |||||
| } | |||||
| if cloudbrain.Cloudbrain.Status == string(models.ModelArtsRunning) { | |||||
| if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType]; !ok { | |||||
| if int64(cloudbrain.Cloudbrain.StartTime) < beginTime { | |||||
| cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime)) | |||||
| } else { | |||||
| cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime)) | |||||
| } | |||||
| } else { | |||||
| if int64(cloudbrain.Cloudbrain.StartTime) < beginTime { | |||||
| cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime)) | |||||
| } else { | |||||
| cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime)) | |||||
| } | |||||
| } | |||||
| } else { | |||||
| if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType]; !ok { | |||||
| if int64(cloudbrain.Cloudbrain.StartTime) < beginTime { | |||||
| cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(beginTime)) | |||||
| } else { | |||||
| cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(cloudbrain.Cloudbrain.StartTime)) | |||||
| } | |||||
| } else { | |||||
| if int64(cloudbrain.Cloudbrain.StartTime) < beginTime { | |||||
| cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(beginTime)) | |||||
| } else { | |||||
| cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(cloudbrain.Cloudbrain.StartTime)) | |||||
| } | |||||
| } | |||||
| } | |||||
| } | |||||
| return cloudBrainCenterCodeAndCardType | |||||
| } | |||||
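A minimal standalone sketch of the per-hour accounting used above: each task contributes accelerator cards per node × node count × the seconds of its run attributed to the hourly window, with a start before the window clamped to the window start and a still-running task charged up to the window end. All names and values below are illustrative, not the real models package.

package main

import "fmt"

// cardSeconds follows the arithmetic in getcloudBrainCenterCodeAndCardTypeInfo:
// cards per node * node count * seconds attributed to [windowBegin, windowEnd).
func cardSeconds(accCards, nodes int, start, end, windowBegin, windowEnd int64, running bool) int64 {
	if start < windowBegin {
		start = windowBegin // only count the part of the run inside this window
	}
	stop := end
	if running {
		stop = windowEnd // a running task is charged up to the end of the window
	}
	if stop < start {
		return 0
	}
	return int64(accCards*nodes) * (stop - start)
}

func main() {
	// 4 cards on 2 nodes, started 30 minutes into a one-hour window, still running:
	// 4 * 2 * 1800s = 14400 card-seconds.
	fmt.Println(cardSeconds(4, 2, 1800, 0, 0, 3600, true))
}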
| @@ -433,6 +433,7 @@ func ReferenceDatasetAvailable(ctx *context.Context) { | |||||
| PublicOnly: true, | PublicOnly: true, | ||||
| NeedAttachment: false, | NeedAttachment: false, | ||||
| CloudBrainType: models.TypeCloudBrainAll, | CloudBrainType: models.TypeCloudBrainAll, | ||||
| SearchOrderBy: models.SearchOrderByDefault, | |||||
| } | } | ||||
| dataset, _ := models.GetDatasetByRepo(&models.Repository{ID: ctx.Repo.Repository.ID}) | dataset, _ := models.GetDatasetByRepo(&models.Repository{ID: ctx.Repo.Repository.ID}) | ||||
| if dataset != nil { | if dataset != nil { | ||||
| @@ -448,6 +449,7 @@ func PublicDatasetMultiple(ctx *context.Context) { | |||||
| PublicOnly: true, | PublicOnly: true, | ||||
| NeedAttachment: true, | NeedAttachment: true, | ||||
| CloudBrainType: ctx.QueryInt("type"), | CloudBrainType: ctx.QueryInt("type"), | ||||
| SearchOrderBy: models.SearchOrderByDefault, | |||||
| } | } | ||||
| datasetMultiple(ctx, opts) | datasetMultiple(ctx, opts) | ||||
| @@ -1,6 +1,7 @@ | |||||
| package repo | package repo | ||||
| import ( | import ( | ||||
| "code.gitea.io/gitea/modules/urfs_client/urchin" | |||||
| "encoding/json" | "encoding/json" | ||||
| "errors" | "errors" | ||||
| "fmt" | "fmt" | ||||
| @@ -37,6 +38,7 @@ import ( | |||||
| "code.gitea.io/gitea/modules/context" | "code.gitea.io/gitea/modules/context" | ||||
| "code.gitea.io/gitea/modules/log" | "code.gitea.io/gitea/modules/log" | ||||
| "code.gitea.io/gitea/modules/setting" | "code.gitea.io/gitea/modules/setting" | ||||
| cloudbrainService "code.gitea.io/gitea/services/cloudbrain" | |||||
| ) | ) | ||||
| const ( | const ( | ||||
| @@ -430,7 +432,7 @@ func grampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain | |||||
| //prepare command | //prepare command | ||||
| preTrainModelPath := getPreTrainModelPath(form.PreTrainModelUrl, form.CkptName) | preTrainModelPath := getPreTrainModelPath(form.PreTrainModelUrl, form.CkptName) | ||||
| command, err := generateCommand(repo.Name, grampus.ProcessorTypeGPU, codeMinioPath+cloudbrain.DefaultBranchName+".zip", datasetRemotePath, bootFile, params, setting.CBCodePathPrefix+jobName+cloudbrain.ModelMountPath+"/", allFileName, preTrainModelPath, form.CkptName) | |||||
| command, err := generateCommand(repo.Name, grampus.ProcessorTypeGPU, codeMinioPath+cloudbrain.DefaultBranchName+".zip", datasetRemotePath, bootFile, params, setting.CBCodePathPrefix+jobName+cloudbrain.ModelMountPath+"/", allFileName, preTrainModelPath, form.CkptName, "") | |||||
| if err != nil { | if err != nil { | ||||
| log.Error("Failed to generateCommand: %s (%v)", displayJobName, err, ctx.Data["MsgID"]) | log.Error("Failed to generateCommand: %s (%v)", displayJobName, err, ctx.Data["MsgID"]) | ||||
| grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU) | grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU) | ||||
| @@ -687,7 +689,7 @@ func grampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain | |||||
| //prepare command | //prepare command | ||||
| preTrainModelPath := getPreTrainModelPath(form.PreTrainModelUrl, form.CkptName) | preTrainModelPath := getPreTrainModelPath(form.PreTrainModelUrl, form.CkptName) | ||||
| command, err := generateCommand(repo.Name, grampus.ProcessorTypeNPU, codeObsPath+cloudbrain.DefaultBranchName+".zip", datasetRemotePath, bootFile, params, setting.CodePathPrefix+jobName+modelarts.OutputPath, allFileName, preTrainModelPath, form.CkptName) | |||||
| command, err := generateCommand(repo.Name, grampus.ProcessorTypeNPU, codeObsPath+cloudbrain.DefaultBranchName+".zip", datasetRemotePath, bootFile, params, setting.CodePathPrefix+jobName+modelarts.OutputPath, allFileName, preTrainModelPath, form.CkptName, grampus.GetNpuModelRemoteObsUrl(jobName)) | |||||
| if err != nil { | if err != nil { | ||||
| log.Error("Failed to generateCommand: %s (%v)", displayJobName, err, ctx.Data["MsgID"]) | log.Error("Failed to generateCommand: %s (%v)", displayJobName, err, ctx.Data["MsgID"]) | ||||
| grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU) | grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU) | ||||
| @@ -861,7 +863,7 @@ func GrampusTrainJobShow(ctx *context.Context) { | |||||
| } | } | ||||
| oldStatus := task.Status | oldStatus := task.Status | ||||
| task.Status = grampus.TransTrainJobStatus(result.JobInfo.Status) | task.Status = grampus.TransTrainJobStatus(result.JobInfo.Status) | ||||
| if task.Status != result.JobInfo.Status || result.JobInfo.Status == models.GrampusStatusRunning { | |||||
| if task.Status != oldStatus || task.Status == models.GrampusStatusRunning { | |||||
| task.Duration = result.JobInfo.RunSec | task.Duration = result.JobInfo.RunSec | ||||
| if task.Duration < 0 { | if task.Duration < 0 { | ||||
| task.Duration = 0 | task.Duration = 0 | ||||
| @@ -877,6 +879,11 @@ func GrampusTrainJobShow(ctx *context.Context) { | |||||
| task.CorrectCreateUnix() | task.CorrectCreateUnix() | ||||
| if oldStatus != task.Status { | if oldStatus != task.Status { | ||||
| notification.NotifyChangeCloudbrainStatus(task, oldStatus) | notification.NotifyChangeCloudbrainStatus(task, oldStatus) | ||||
| if models.IsTrainJobTerminal(task.Status) { | |||||
| if len(result.JobInfo.Tasks) > 0 && len(result.JobInfo.Tasks[0].CenterID) == 1 { | |||||
| urchin.GetBackNpuModel(task.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(task.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID)) | |||||
| } | |||||
| } | |||||
| } | } | ||||
| err = models.UpdateJob(task) | err = models.UpdateJob(task) | ||||
| if err != nil { | if err != nil { | ||||
| @@ -915,10 +922,7 @@ func GrampusTrainJobShow(ctx *context.Context) { | |||||
| ctx.Data["canDownload"] = cloudbrain.CanModifyJob(ctx, task) | ctx.Data["canDownload"] = cloudbrain.CanModifyJob(ctx, task) | ||||
| ctx.Data["displayJobName"] = task.DisplayJobName | ctx.Data["displayJobName"] = task.DisplayJobName | ||||
| aiCenterInfo := strings.Split(task.AiCenter, "+") | |||||
| if len(aiCenterInfo) == 2 { | |||||
| ctx.Data["ai_center"] = aiCenterInfo[1] | |||||
| } | |||||
| ctx.Data["ai_center"] = cloudbrainService.GetAiCenterShow(task.AiCenter, ctx) | |||||
| ctx.HTML(http.StatusOK, tplGrampusTrainJobShow) | ctx.HTML(http.StatusOK, tplGrampusTrainJobShow) | ||||
| } | } | ||||
| @@ -974,15 +978,18 @@ func GrampusGetLog(ctx *context.Context) { | |||||
| return | return | ||||
| } | } | ||||
| func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bootFile, paramSrc, outputRemotePath, datasetName, pretrainModelPath, pretrainModelFileName string) (string, error) { | |||||
| func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bootFile, paramSrc, outputRemotePath, datasetName, pretrainModelPath, pretrainModelFileName, modelRemoteObsUrl string) (string, error) { | |||||
| var command string | var command string | ||||
| //prepare | |||||
| workDir := grampus.NpuWorkDir | workDir := grampus.NpuWorkDir | ||||
| if processorType == grampus.ProcessorTypeGPU { | |||||
| if processorType == grampus.ProcessorTypeNPU { | |||||
| command += "pwd;cd " + workDir + grampus.CommandPrepareScriptNpu | |||||
| } else if processorType == grampus.ProcessorTypeGPU { | |||||
| workDir = grampus.GpuWorkDir | workDir = grampus.GpuWorkDir | ||||
| command += "pwd;cd " + workDir + fmt.Sprintf(grampus.CommandPrepareScriptGpu, setting.Grampus.SyncScriptProject, setting.Grampus.SyncScriptProject) | |||||
| } | } | ||||
| command += "pwd;cd " + workDir + fmt.Sprintf(grampus.CommandPrepareScript, setting.Grampus.SyncScriptProject, setting.Grampus.SyncScriptProject) | |||||
| //download code & dataset | //download code & dataset | ||||
| if processorType == grampus.ProcessorTypeNPU { | if processorType == grampus.ProcessorTypeNPU { | ||||
| //no need to download code & dataset by internet | //no need to download code & dataset by internet | ||||
| @@ -997,7 +1004,7 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo | |||||
| //no need to process | //no need to process | ||||
| } else if processorType == grampus.ProcessorTypeGPU { | } else if processorType == grampus.ProcessorTypeGPU { | ||||
| unZipDatasetCommand := generateDatasetUnzipCommand(datasetName) | unZipDatasetCommand := generateDatasetUnzipCommand(datasetName) | ||||
| commandUnzip := "cd " + workDir + "code;unzip -q master.zip;echo \"start to unzip dataset\";cd " + workDir + "dataset;" + unZipDatasetCommand | |||||
| commandUnzip := "cd " + workDir + "code;unzip -q master.zip;rm -f master.zip;echo \"start to unzip dataset\";cd " + workDir + "dataset;" + unZipDatasetCommand | |||||
| command += commandUnzip | command += commandUnzip | ||||
| } | } | ||||
| @@ -1031,7 +1038,8 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo | |||||
| var commandCode string | var commandCode string | ||||
| if processorType == grampus.ProcessorTypeNPU { | if processorType == grampus.ProcessorTypeNPU { | ||||
| commandCode = "/bin/bash /home/work/run_train_for_openi.sh /home/work/openi.py /tmp/log/train.log" + paramCode + ";" | |||||
| paramCode += " --model_url=" + modelRemoteObsUrl | |||||
| commandCode = "/bin/bash /home/work/run_train_for_openi.sh /home/work/openi.py " + grampus.NpuLocalLogUrl + paramCode + ";" | |||||
| } else if processorType == grampus.ProcessorTypeGPU { | } else if processorType == grampus.ProcessorTypeGPU { | ||||
| if pretrainModelFileName != "" { | if pretrainModelFileName != "" { | ||||
| paramCode += " --ckpt_url" + "=" + workDir + "pretrainmodel/" + pretrainModelFileName | paramCode += " --ckpt_url" + "=" + workDir + "pretrainmodel/" + pretrainModelFileName | ||||
| @@ -1047,8 +1055,7 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo | |||||
| //upload models | //upload models | ||||
| if processorType == grampus.ProcessorTypeNPU { | if processorType == grampus.ProcessorTypeNPU { | ||||
| commandUpload := "cd " + workDir + setting.Grampus.SyncScriptProject + "/;./uploader_for_npu " + setting.Bucket + " " + outputRemotePath + " " + workDir + "output/;" | |||||
| command += commandUpload | |||||
| // no need to upload | |||||
| } else if processorType == grampus.ProcessorTypeGPU { | } else if processorType == grampus.ProcessorTypeGPU { | ||||
| commandUpload := "cd " + workDir + setting.Grampus.SyncScriptProject + "/;./uploader_for_gpu " + setting.Grampus.Env + " " + outputRemotePath + " " + workDir + "output/;" | commandUpload := "cd " + workDir + setting.Grampus.SyncScriptProject + "/;./uploader_for_gpu " + setting.Grampus.Env + " " + outputRemotePath + " " + workDir + "output/;" | ||||
| command += commandUpload | command += commandUpload | ||||
| @@ -1079,6 +1086,7 @@ func generateDatasetUnzipCommand(datasetName string) string { | |||||
| if strings.HasSuffix(datasetNameArray[0], ".tar.gz") { | if strings.HasSuffix(datasetNameArray[0], ".tar.gz") { | ||||
| unZipDatasetCommand = "tar --strip-components=1 -zxvf '" + datasetName + "';" | unZipDatasetCommand = "tar --strip-components=1 -zxvf '" + datasetName + "';" | ||||
| } | } | ||||
| unZipDatasetCommand += "rm -f '" + datasetName + "';" | |||||
| } else { //多数据集 | } else { //多数据集 | ||||
| for _, datasetNameTemp := range datasetNameArray { | for _, datasetNameTemp := range datasetNameArray { | ||||
| @@ -1087,6 +1095,7 @@ func generateDatasetUnzipCommand(datasetName string) string { | |||||
| } else { | } else { | ||||
| unZipDatasetCommand = unZipDatasetCommand + "unzip -q '" + datasetNameTemp + "' -d './" + strings.TrimSuffix(datasetNameTemp, ".zip") + "';" | unZipDatasetCommand = unZipDatasetCommand + "unzip -q '" + datasetNameTemp + "' -d './" + strings.TrimSuffix(datasetNameTemp, ".zip") + "';" | ||||
| } | } | ||||
| unZipDatasetCommand += "rm -f '" + datasetNameTemp + "';" | |||||
| } | } | ||||
| } | } | ||||
| @@ -1312,6 +1312,36 @@ func getUserCommand(engineId int, req *modelarts.GenerateTrainJobReq) (string, s | |||||
| return userCommand, userImageUrl | return userCommand, userImageUrl | ||||
| } | } | ||||
| func getInfJobUserCommand(engineId int, req *modelarts.GenerateInferenceJobReq) (string, string) { | |||||
| userImageUrl := "" | |||||
| userCommand := "" | |||||
| if engineId < 0 { | |||||
| tmpCodeObsPath := strings.Trim(req.CodeObsPath, "/") | |||||
| tmpCodeObsPaths := strings.Split(tmpCodeObsPath, "/") | |||||
| lastCodeDir := "code" | |||||
| if len(tmpCodeObsPaths) > 0 { | |||||
| lastCodeDir = tmpCodeObsPaths[len(tmpCodeObsPaths)-1] | |||||
| } | |||||
| userCommand = "/bin/bash /home/work/run_train.sh 's3://" + req.CodeObsPath + "' '" + lastCodeDir + "/" + req.BootFile + "' '/tmp/log/train.log' --'data_url'='s3://" + req.DataUrl + "' --'train_url'='s3://" + req.TrainUrl + "'" | |||||
| var versionInfos modelarts.VersionInfo | |||||
| if err := json.Unmarshal([]byte(setting.EngineVersions), &versionInfos); err != nil { | |||||
| log.Info("json parse err." + err.Error()) | |||||
| } else { | |||||
| for _, engine := range versionInfos.Version { | |||||
| if engine.ID == engineId { | |||||
| userImageUrl = engine.Url | |||||
| break | |||||
| } | |||||
| } | |||||
| } | |||||
| for _, param := range req.Parameters { | |||||
| userCommand += " --'" + param.Label + "'='" + param.Value + "'" | |||||
| } | |||||
| return userCommand, userImageUrl | |||||
| } | |||||
| return userCommand, userImageUrl | |||||
| } | |||||
| func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) { | func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) { | ||||
| ctx.Data["PageIsTrainJob"] = true | ctx.Data["PageIsTrainJob"] = true | ||||
| var jobID = ctx.Params(":jobid") | var jobID = ctx.Params(":jobid") | ||||
| @@ -2171,6 +2201,10 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference | |||||
| JobType: string(models.JobTypeInference), | JobType: string(models.JobTypeInference), | ||||
| } | } | ||||
| userCommand, userImageUrl := getInfJobUserCommand(engineID, req) | |||||
| req.UserCommand = userCommand | |||||
| req.UserImageUrl = userImageUrl | |||||
| _, err = modelarts.GenerateInferenceJob(ctx, req) | _, err = modelarts.GenerateInferenceJob(ctx, req) | ||||
| if err != nil { | if err != nil { | ||||
| log.Error("GenerateTrainJob failed:%v", err.Error()) | log.Error("GenerateTrainJob failed:%v", err.Error()) | ||||
| @@ -8,3 +8,7 @@ var SPECIFICATION_NOT_AVAILABLE = &BizError{Code: 1003, Err: "specification not | |||||
| var CATEGORY_STILL_HAS_BADGES = &BizError{Code: 1004, Err: "Please delete badges in the category first"} | var CATEGORY_STILL_HAS_BADGES = &BizError{Code: 1004, Err: "Please delete badges in the category first"} | ||||
| var BADGES_STILL_HAS_USERS = &BizError{Code: 1005, Err: "Please delete users of badge first"} | var BADGES_STILL_HAS_USERS = &BizError{Code: 1005, Err: "Please delete users of badge first"} | ||||
| //common response | |||||
| var SYSTEM_ERROR = &BizError{Code: 9009, Err: "System error. Please try again later"} | |||||
| var INSUFFICIENT_PERMISSION = &BizError{Code: 9003, Err: "insufficient permissions"} | |||||
| @@ -6,17 +6,18 @@ package routes | |||||
| import ( | import ( | ||||
| "bytes" | "bytes" | ||||
| "code.gitea.io/gitea/routers/badge" | |||||
| "code.gitea.io/gitea/routers/reward/point" | |||||
| "code.gitea.io/gitea/routers/task" | |||||
| badge_service "code.gitea.io/gitea/services/badge" | |||||
| "code.gitea.io/gitea/services/reward" | |||||
| "encoding/gob" | "encoding/gob" | ||||
| "net/http" | "net/http" | ||||
| "path" | "path" | ||||
| "text/template" | "text/template" | ||||
| "time" | "time" | ||||
| "code.gitea.io/gitea/routers/badge" | |||||
| "code.gitea.io/gitea/routers/reward/point" | |||||
| "code.gitea.io/gitea/routers/task" | |||||
| badge_service "code.gitea.io/gitea/services/badge" | |||||
| "code.gitea.io/gitea/services/reward" | |||||
| "code.gitea.io/gitea/routers/modelapp" | "code.gitea.io/gitea/routers/modelapp" | ||||
| "code.gitea.io/gitea/modules/slideimage" | "code.gitea.io/gitea/modules/slideimage" | ||||
| @@ -835,13 +835,17 @@ func Cloudbrains(ctx *context.Context) { | |||||
| ctx.ServerError("Get job failed:", err) | ctx.ServerError("Get job failed:", err) | ||||
| return | return | ||||
| } | } | ||||
| models.LoadSpecs4CloudbrainInfo(ciTasks) | |||||
| for i, task := range ciTasks { | |||||
| for i := range ciTasks { | |||||
| ciTasks[i].CanDebug = true | ciTasks[i].CanDebug = true | ||||
| ciTasks[i].CanDel = true | ciTasks[i].CanDel = true | ||||
| ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource | |||||
| } | |||||
| ciTasks[i].Cloudbrain.ComputeResource = ciTasks[i].ComputeResource | |||||
| if ciTasks[i].Cloudbrain.Type == models.TypeC2Net { | |||||
| ciTasks[i].Cloudbrain.Spec.Cluster = models.C2NetCluster | |||||
| } else { | |||||
| ciTasks[i].Cloudbrain.Spec.Cluster = models.OpenICluster | |||||
| } | |||||
| } | |||||
| pager := context.NewPagination(int(count), setting.UI.IssuePagingNum, page, getTotalPage(count, setting.UI.IssuePagingNum)) | pager := context.NewPagination(int(count), setting.UI.IssuePagingNum, page, getTotalPage(count, setting.UI.IssuePagingNum)) | ||||
| pager.SetDefaultParams(ctx) | pager.SetDefaultParams(ctx) | ||||
| pager.AddParam(ctx, "listType", "ListType") | pager.AddParam(ctx, "listType", "ListType") | ||||
| @@ -0,0 +1,33 @@ | |||||
| package cloudbrain | |||||
| import ( | |||||
| "code.gitea.io/gitea/modules/context" | |||||
| "code.gitea.io/gitea/modules/setting" | |||||
| "strings" | |||||
| ) | |||||
| func GetAiCenterShow(aiCenter string, ctx *context.Context) string { | |||||
| aiCenterInfo := strings.Split(aiCenter, "+") | |||||
| if len(aiCenterInfo) == 2 { | |||||
| if setting.C2NetMapInfo != nil { | |||||
| if info, ok := setting.C2NetMapInfo[aiCenterInfo[0]]; ok { | |||||
| if ctx.Language() == "zh-CN" { | |||||
| return info.Content | |||||
| } else { | |||||
| return info.ContentEN | |||||
| } | |||||
| } else { | |||||
| return aiCenterInfo[1] | |||||
| } | |||||
| } else { | |||||
| return aiCenterInfo[1] | |||||
| } | |||||
| } | |||||
| return "" | |||||
| } | |||||
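A rough self-contained illustration of the fallback chain implemented above: the stored AiCenter value has the form "code+name"; when a configured mapping exists for the code it is shown, localized by the request language, otherwise the raw name after the '+' is used. The center codes, names and the map layout here are made up; the real data comes from setting.C2NetMapInfo.

package main

import (
	"fmt"
	"strings"
)

type centerInfo struct{ Content, ContentEN string }

// showName mirrors GetAiCenterShow's fallback: prefer the configured localized
// name for the center code, otherwise fall back to the raw name after '+'.
func showName(aiCenter, lang string, mapInfo map[string]centerInfo) string {
	parts := strings.Split(aiCenter, "+")
	if len(parts) != 2 {
		return ""
	}
	if info, ok := mapInfo[parts[0]]; ok {
		if lang == "zh-CN" {
			return info.Content
		}
		return info.ContentEN
	}
	return parts[1]
}

func main() {
	mapInfo := map[string]centerInfo{"center01": {Content: "示例智算中心", ContentEN: "Example AI Center"}}
	fmt.Println(showName("center01+example_center", "en-US", mapInfo))  // Example AI Center
	fmt.Println(showName("center99+unmapped_center", "zh-CN", mapInfo)) // unmapped_center
}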
| @@ -497,6 +497,17 @@ | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| <div class="ui modal debug-again-alert"> | |||||
| <div class="ui message" style="background-color: rgba(242, 113, 28, 0.05);border: 1px solid rgba(242, 113, 28, 1);border-radius: 5px;"> | |||||
| <div style="display: flex;align-items: center;"> | |||||
| <i class="ri-information-line" style="font-size: 35px;color: rgba(242, 113, 28, 1);;"></i> | |||||
| <div style="text-align: left;margin-left: 1rem;"> | |||||
| <div style="font-weight: 600;line-height: 2;">{{.i18n.Tr "repo.cloudbrain.morethanonejob1" | Safe }}</div> | |||||
| <div style="color:#939393">{{.i18n.Tr "repo.cloudbrain.morethanonejob2" | Safe}}</div> | |||||
| </div> | |||||
| </div> | |||||
| </div> | |||||
| </div> | |||||
| </div> | </div> | ||||
| <script> | <script> | ||||
| document.addEventListener('DOMContentLoaded', function() { | document.addEventListener('DOMContentLoaded', function() { | ||||
| @@ -16,19 +16,11 @@ | |||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster=resource_cluster_c2net&aiCenter={{$.aiCenter}}" data-value="{{.i18n.Tr "cloudbrain.resource_cluster_c2net"}}">{{.i18n.Tr "cloudbrain.resource_cluster_c2net"}}</a> | <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster=resource_cluster_c2net&aiCenter={{$.aiCenter}}" data-value="{{.i18n.Tr "cloudbrain.resource_cluster_c2net"}}">{{.i18n.Tr "cloudbrain.resource_cluster_c2net"}}</a> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| <div class="ui selection dropdown" style="min-width: 10em;min-height:2.6em;border-radius: .28571429rem;margin-right: 1em;padding: .67em 3.2em .7em 1em;"> | |||||
| <div class="default text" style="color: rgba(0,0,0,.87);">{{.i18n.Tr "cloudbrain.all_ai_center"}}</div> | |||||
| <div class="ui selection dropdown" id="aiCenter-sel" style="min-width: 10em;min-height:2.6em;border-radius: .28571429rem;margin-right: 1em;padding: .67em 3.2em .7em 1em;"> | |||||
| <div class="default text" style="color: rgba(0,0,0,.87);" aicenter="{{$.aiCenter}}">{{if eq $.aiCenter ""}}{{.i18n.Tr "cloudbrain.all_ai_center"}}{{end}}</div> | |||||
| <i class="dropdown icon"></i> | <i class="dropdown icon"></i> | ||||
| <div class="menu"> | <div class="menu"> | ||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=" data-value='{{.i18n.Tr "cloudbrain.all_ai_center"}}'>{{.i18n.Tr "cloudbrain.all_ai_center"}}</a> | |||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=鹏城云计算所" data-value="鹏城云计算所">鹏城云计算所</a> | |||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=成都智算" data-value="成都智算">成都智算</a> | |||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=合肥类脑" data-value="合肥类脑">合肥类脑</a> | |||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=octopus" data-value="octopus">octopus</a> | |||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=武汉智算" data-value="武汉智算">武汉智算</a> | |||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=西安智算" data-value="西安智算">西安智算</a> | |||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=中原智算" data-value="中原智算">中原智算</a> | |||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=许昌AI中心" data-value="许昌AI中心">许昌AI中心</a> | |||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=" data-value='{{.i18n.Tr "cloudbrain.all_ai_center"}}'>{{.i18n.Tr "cloudbrain.all_ai_center"}}</a> | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| <div class="ui selection dropdown" style="min-width: 10em;min-height:2.6em;border-radius: .28571429rem;margin-right: 1em;padding: .67em 3.2em .7em 1em;"> | <div class="ui selection dropdown" style="min-width: 10em;min-height:2.6em;border-radius: .28571429rem;margin-right: 1em;padding: .67em 3.2em .7em 1em;"> | ||||
| @@ -73,4 +65,47 @@ | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| </div> | |||||
| </div> | |||||
| <script> | |||||
| ;(function() { | |||||
| document.addEventListener('DOMContentLoaded', function() { | |||||
| $.ajax({ | |||||
| type: "GET", | |||||
| url: "/api/v1/cloudbrain/get_center_info", | |||||
| dataType: "json", | |||||
| data: {}, | |||||
| success: function (res) { | |||||
| var data = res || []; | |||||
| var aiCenterSelEl = $('#aiCenter-sel'); | |||||
| var itemEl = aiCenterSelEl.find('.menu .item').eq(0); | |||||
| var selectAiCenterCode = aiCenterSelEl.find('.default').attr('aicenter'); | |||||
| var selectAiCenterName = ''; | |||||
| var lang = document.querySelector('html').getAttribute('lang') || 'en-US'; | |||||
| var except = ['', 'more']; | |||||
| for (var i = 0, iLen = data.length; i < iLen; i++) { | |||||
| var dataI = data[i]; | |||||
| var aiCenterCode = dataI.name; | |||||
| if (except.indexOf(aiCenterCode) >= 0) continue; | |||||
| var aiCenterName = lang === 'en-US' ? dataI.content_en : dataI.content; | |||||
| var itemClone = itemEl.clone(); | |||||
| var oHref = itemClone.attr('href'); | |||||
| var oId = itemClone.attr('id'); | |||||
| itemClone.attr('data-value', aiCenterCode); | |||||
| itemClone.removeAttr('id'); | |||||
| itemClone.attr('href', oHref + aiCenterCode); | |||||
| itemClone.text(aiCenterName); | |||||
| aiCenterSelEl.find('.menu').append(itemClone); | |||||
| if (selectAiCenterCode === aiCenterCode) { | |||||
| selectAiCenterName = aiCenterName; | |||||
| } | |||||
| } | |||||
| selectAiCenterCode && aiCenterSelEl.dropdown('set selected', selectAiCenterCode); | |||||
| selectAiCenterName && aiCenterSelEl.dropdown('set text', selectAiCenterName); | |||||
| }, | |||||
| error: function (err) { | |||||
| console.log(err); | |||||
| } | |||||
| }); | |||||
| }); | |||||
| })(); | |||||
| </script> | |||||
| @@ -28,19 +28,11 @@ | |||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster=resource_cluster_c2net&aiCenter={{$.aiCenter}}" data-value="{{.i18n.Tr "cloudbrain.resource_cluster_c2net"}}">{{.i18n.Tr "cloudbrain.resource_cluster_c2net"}}</a> | <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster=resource_cluster_c2net&aiCenter={{$.aiCenter}}" data-value="{{.i18n.Tr "cloudbrain.resource_cluster_c2net"}}">{{.i18n.Tr "cloudbrain.resource_cluster_c2net"}}</a> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| <div class="ui selection dropdown" style="min-width: 10em;min-height:2.6em;border-radius: .28571429rem;margin-right: 1em;padding: .67em 3.2em .7em 1em;"> | |||||
| <div class="default text" style="color: rgba(0,0,0,.87);">{{.i18n.Tr "cloudbrain.all_ai_center"}}</div> | |||||
| <div class="ui selection dropdown" id="aiCenter-sel" style="min-width: 10em;min-height:2.6em;border-radius: .28571429rem;margin-right: 1em;padding: .67em 3.2em .7em 1em;"> | |||||
| <div class="default text" style="color: rgba(0,0,0,.87);" aicenter="{{$.aiCenter}}">{{if eq $.aiCenter ""}}{{.i18n.Tr "cloudbrain.all_ai_center"}}{{end}}</div> | |||||
| <i class="dropdown icon"></i> | <i class="dropdown icon"></i> | ||||
| <div class="menu"> | <div class="menu"> | ||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=" data-value='{{.i18n.Tr "cloudbrain.all_ai_center"}}'>{{.i18n.Tr "cloudbrain.all_ai_center"}}</a> | |||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=鹏城云计算所" data-value="鹏城云计算所">鹏城云计算所</a> | |||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=成都智算" data-value="成都智算">成都智算</a> | |||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=合肥类脑" data-value="合肥类脑">合肥类脑</a> | |||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=octopus" data-value="octopus">octopus</a> | |||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=武汉智算" data-value="武汉智算">武汉智算</a> | |||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=西安智算" data-value="西安智算">西安智算</a> | |||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=中原智算" data-value="中原智算">中原智算</a> | |||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=许昌AI中心" data-value="许昌AI中心">许昌AI中心</a> | |||||
| <a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=" data-value='{{.i18n.Tr "cloudbrain.all_ai_center"}}'>{{.i18n.Tr "cloudbrain.all_ai_center"}}</a> | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| <div class="ui selection dropdown" style="min-width: 10em;min-height:2.6em;border-radius: .28571429rem;margin-right: 1em;padding: .67em 3.2em .7em 1em;"> | <div class="ui selection dropdown" style="min-width: 10em;min-height:2.6em;border-radius: .28571429rem;margin-right: 1em;padding: .67em 3.2em .7em 1em;"> | ||||
| @@ -87,4 +79,47 @@ | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| </div> | |||||
| </div> | |||||
| <script> | |||||
| ;(function() { | |||||
| document.addEventListener('DOMContentLoaded', function() { | |||||
| $.ajax({ | |||||
| type: "GET", | |||||
| url: "/api/v1/cloudbrain/get_center_info", | |||||
| dataType: "json", | |||||
| data: {}, | |||||
| success: function (res) { | |||||
| var data = res || []; | |||||
| var aiCenterSelEl = $('#aiCenter-sel'); | |||||
| var itemEl = aiCenterSelEl.find('.menu .item').eq(0); | |||||
| var selectAiCenterCode = aiCenterSelEl.find('.default').attr('aicenter'); | |||||
| var selectAiCenterName = ''; | |||||
| var lang = document.querySelector('html').getAttribute('lang') || 'en-US'; | |||||
| var except = ['', 'more']; | |||||
| for (var i = 0, iLen = data.length; i < iLen; i++) { | |||||
| var dataI = data[i]; | |||||
| var aiCenterCode = dataI.name; | |||||
| if (except.indexOf(aiCenterCode) >= 0) continue; | |||||
| var aiCenterName = lang === 'en-US' ? dataI.content_en : dataI.content; | |||||
| var itemClone = itemEl.clone(); | |||||
| var oHref = itemClone.attr('href'); | |||||
| var oId = itemClone.attr('id'); | |||||
| itemClone.attr('data-value', aiCenterCode); | |||||
| itemClone.removeAttr('id'); | |||||
| itemClone.attr('href', oHref + aiCenterCode); | |||||
| itemClone.text(aiCenterName); | |||||
| aiCenterSelEl.find('.menu').append(itemClone); | |||||
| if (selectAiCenterCode === aiCenterCode) { | |||||
| selectAiCenterName = aiCenterName; | |||||
| } | |||||
| } | |||||
| selectAiCenterCode && aiCenterSelEl.dropdown('set selected', selectAiCenterCode); | |||||
| selectAiCenterName && aiCenterSelEl.dropdown('set text', selectAiCenterName); | |||||
| }, | |||||
| error: function (err) { | |||||
| console.log(err); | |||||
| } | |||||
| }); | |||||
| }); | |||||
| })(); | |||||
| </script> | |||||
| @@ -45,7 +45,7 @@ | |||||
| <div class="sixteen wide mobile eight wide tablet eight wide computer column" style=" margin:2.0rem 0"> | <div class="sixteen wide mobile eight wide tablet eight wide computer column" style=" margin:2.0rem 0"> | ||||
| {{.i18n.Tr "custom.foot.copyright"}} <a href="http://beian.miit.gov.cn/" target="_blank">京ICP备18004880号</a> | {{.i18n.Tr "custom.foot.copyright"}} <a href="http://beian.miit.gov.cn/" target="_blank">京ICP备18004880号</a> | ||||
| <br> | <br> | ||||
| {{.i18n.Tr "Powered_by 鹏城实验室云脑、"}}<a href="https://www.trustie.net/" target="_blank">Trustie确实</a>{{.i18n.Tr "、gitea"}} | |||||
| {{.i18n.Tr "home.powerdby"}}<a href="https://www.trustie.net/" target="_blank">Trustie确实</a>{{.i18n.Tr "、Gitea"}} | |||||
| <br> | <br> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| @@ -1,11 +1,10 @@ | |||||
| {{if .NotStopTaskCount}} | {{if .NotStopTaskCount}} | ||||
| <div class="ui message" style="background-color: rgba(242, 113, 28, 0.05);border: 1px solid rgba(242, 113, 28, 1);border-radius: 5px;"> | |||||
| <i class="close icon"></i> | |||||
| <div class="ui message" style="background-color: rgba(242, 113, 28, 0.05);border: 1px solid rgba(242, 113, 28, 1);border-radius: 5px;"> | |||||
| <div style="display: flex;align-items: center;"> | <div style="display: flex;align-items: center;"> | ||||
| <i class="ri-information-line" style="font-size: 35px;color: rgba(242, 113, 28, 1);;"></i> | <i class="ri-information-line" style="font-size: 35px;color: rgba(242, 113, 28, 1);;"></i> | ||||
| <div style="text-align: left;margin-left: 1rem;"> | <div style="text-align: left;margin-left: 1rem;"> | ||||
| <div style="font-weight: 600;line-height: 2;">您已经有 <span style="color:rgba(242, 113, 28, 1);">同类任务</span> 正在等待或运行中,请等待任务结束再创建</div> | |||||
| <div style="color:#939393">可以在 “<a href="/cloudbrains" >个人中心 > 云脑任务</a>” 查看您所有的云脑任务</div> | |||||
| <div style="font-weight: 600;line-height: 2;">{{.i18n.Tr "repo.cloudbrain.morethanonejob1" | Safe }}</div> | |||||
| <div style="color:#939393">{{.i18n.Tr "repo.cloudbrain.morethanonejob2" | Safe}}</div> | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| @@ -0,0 +1,47 @@ | |||||
| <div> | |||||
| <div class="ui modal max-full-log{{.VersionName}} container" style="height: 90%;margin: 3rem auto;"> | |||||
| <div class="file-info" style="padding: 2rem;justify-content: space-between;height: 10%;"> | |||||
| <div id="log-file-title" style="font-size: 16px;font-weight:600"></div> | |||||
| <div class="file-info"> | |||||
| <a class="file-info" id="{{.VersionName}}-log-down" href=""> | |||||
| <i class="ri-download-cloud-2-line"></i> | |||||
| <span style="margin-left: 0.3rem;font-size: 12px;" class="log-file-down"></span> | |||||
| </a> | |||||
| <div class="file-info close-log-dialog" data-version="{{.VersionName}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;"> | |||||
| <i class="ri-fullscreen-exit-fill" style="font-size: 16px;"></i> | |||||
| <span id="log-file-exit" style="margin-left: 0.3rem;font-size: 12px;"></span> | |||||
| </div> | |||||
| </div> | |||||
| </div> | |||||
| <div style="margin: 0 2.5rem;border: 1px solid #e8e8e8;height: 85%;position: relative;"> | |||||
| <span> | |||||
| <a style="position: absolute; right: -32px;cursor: pointer;" | |||||
| class="log_top-max" data-version="{{.VersionName}}" data-max="-max"><i class="icon-to-top"></i></a> | |||||
| </span> | |||||
| <span class="log-info-{{.VersionName}}"> | |||||
| <a style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||||
| class="log_bottom-max" data-version="{{.VersionName}}" data-max="-max"><i | |||||
| class="icon-to-bottom"></i></a> | |||||
| </span> | |||||
| <div class="ui message message-max{{.VersionName}}" style="display: none;"> | |||||
| <div id="header"></div> | |||||
| </div> | |||||
| <div class="log-scroll-max" id="log-max{{.VersionName}}" data-version="{{.VersionName}}" style="overflow: auto;max-height: 100%;"> | |||||
| <div class="ui inverted active dimmer"> | |||||
| <div class="ui loader"></div> | |||||
| </div> | |||||
| <pre id="log_file-max{{.VersionName}}"></pre> | |||||
| </div> | |||||
| </div> | |||||
| </div> | |||||
| </div> | |||||
| <script> | |||||
| $(".ui.modal .close-log-dialog").click(function () { | |||||
| let version_name = $(this).data("version"); | |||||
| $(`.ui.modal.max-full-log${version_name}`).modal("hide"); | |||||
| }); | |||||
| </script> | |||||
| @@ -1,7 +1,7 @@ | |||||
| <div class="badge-achive"> | <div class="badge-achive"> | ||||
| {{range .AllBadges }} | {{range .AllBadges }} | ||||
| <div class="bagde-section"> | <div class="bagde-section"> | ||||
| <div class="badge-section-title">{{.CategoryName}} (已点亮{{.LightedNum}}个)</div> | |||||
| <div class="badge-section-title">{{.CategoryName}} (已点亮 {{.LightedNum}} 个)</div> | |||||
| <div class="badge-section-children"> | <div class="badge-section-children"> | ||||
| <div class="badge-honor-badge"> | <div class="badge-honor-badge"> | ||||
| <div class="badge-honor-badge-basic"> | <div class="badge-honor-badge-basic"> | ||||
| @@ -32,7 +32,9 @@ | |||||
| <div class="repository"> | <div class="repository"> | ||||
| {{template "repo/header" .}} | {{template "repo/header" .}} | ||||
| <div class="ui container"> | <div class="ui container"> | ||||
| {{template "base/alert" .}} | |||||
| {{if eq .NotStopTaskCount 0}} | |||||
| {{template "base/alert" .}} | |||||
| {{end}} | |||||
| {{template "custom/alert_cb" .}} | {{template "custom/alert_cb" .}} | ||||
| <h4 class="ui top attached header"> | <h4 class="ui top attached header"> | ||||
| {{.i18n.Tr "repo.modelarts.evaluate_job.new_job"}} | {{.i18n.Tr "repo.modelarts.evaluate_job.new_job"}} | ||||
| @@ -151,7 +153,7 @@ | |||||
| </div> | </div> | ||||
| <div class="inline min_title field"> | <div class="inline min_title field"> | ||||
| <label class="label-fix-width" style="font-weight: normal;"></label> | <label class="label-fix-width" style="font-weight: normal;"></label> | ||||
| <button class="ui create_train_job green button"> | |||||
| <button class="ui create_train_job green button {{if eq .NotStopTaskCount 1}}disabled{{end}}"> | |||||
| {{.i18n.Tr "repo.cloudbrain.new"}} | {{.i18n.Tr "repo.cloudbrain.new"}} | ||||
| </button> | </button> | ||||
| <a class="ui button" href="/">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | <a class="ui button" href="/">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | ||||
| @@ -283,7 +285,7 @@ | |||||
| <div class="inline unite min_title field"> | <div class="inline unite min_title field"> | ||||
| <label class="label-fix-width" style="font-weight: normal;"></label> | <label class="label-fix-width" style="font-weight: normal;"></label> | ||||
| <button class="ui create_train_job green button"> | |||||
| <button class="ui create_train_job green button {{if eq .NotStopTaskCount 1}}disabled{{end}}"> | |||||
| {{.i18n.Tr "repo.cloudbrain.new"}} | {{.i18n.Tr "repo.cloudbrain.new"}} | ||||
| </button> | </button> | ||||
| <a class="ui button" href="/">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | <a class="ui button" href="/">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | ||||
| @@ -354,7 +356,7 @@ | |||||
| <div class="inline unite min_title field"> | <div class="inline unite min_title field"> | ||||
| <label class="label-fix-width" style="font-weight: normal;"></label> | <label class="label-fix-width" style="font-weight: normal;"></label> | ||||
| <button class="ui create_train_job green button"> | |||||
| <button class="ui create_train_job green button {{if eq .NotStopTaskCount 1}}disabled{{end}}"> | |||||
| {{.i18n.Tr "repo.cloudbrain.new"}} | {{.i18n.Tr "repo.cloudbrain.new"}} | ||||
| </button> | </button> | ||||
| <a class="ui button" href="/">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | <a class="ui button" href="/">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | ||||
| @@ -524,23 +524,29 @@ | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| <div class="ui tab" data-tab="second{{$k}}"> | <div class="ui tab" data-tab="second{{$k}}"> | ||||
| <div> | |||||
| <div class="file-info"> | |||||
| <a id="{{.VersionName}}-log-down" | <a id="{{.VersionName}}-log-down" | ||||
| class='{{if $.canDownload}}ti-download-file{{else}}disabled{{end}}' | |||||
| class='{{if $.canDownload}}ti-download-file{{else}}disabled{{end}} file-info' | |||||
| href="/api/v1/repos/{{$.RepoRelPath}}/cloudbrain/{{.ID}}/download_log_file"> | href="/api/v1/repos/{{$.RepoRelPath}}/cloudbrain/{{.ID}}/download_log_file"> | ||||
| <i class="ri-download-cloud-2-line"></i> | <i class="ri-download-cloud-2-line"></i> | ||||
| <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | ||||
| </a> | </a> | ||||
| <div class="file-info full-log-dialog" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" | |||||
| data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;" | |||||
| data-log-down="{{$.i18n.Tr "repo.modelarts.download_log"}}" data-href="/api/v1/repos/{{$.RepoRelPath}}/cloudbrain/{{.ID}}/download_log_file" | |||||
| data-scroll-top="{{$.i18n.Tr "repo.log_scroll_start"}}" data-scroll-bottom="{{$.i18n.Tr "repo.log_scroll_end"}}"> | |||||
| <i class="ri-aspect-ratio-line"></i> | |||||
| <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.fullscreen_log_file"}}</span> | |||||
| </div> | |||||
| </div> | </div> | ||||
| <div | <div | ||||
| style="position: relative;border: 1px solid rgba(0,0,0,.2);padding: 0 10px;margin-top: 10px;"> | style="position: relative;border: 1px solid rgba(0,0,0,.2);padding: 0 10px;margin-top: 10px;"> | ||||
| <span> | <span> | ||||
| <a title="滚动到顶部" style="position: absolute; right: -32px;cursor: pointer;" | |||||
| <a title="{{$.i18n.Tr "repo.log_scroll_start"}}" style="position: absolute; right: -32px;cursor: pointer;" | |||||
| class="log_top" data-version="{{.VersionName}}"><i class="icon-to-top"></i></a> | class="log_top" data-version="{{.VersionName}}"><i class="icon-to-top"></i></a> | ||||
| </span> | </span> | ||||
| <span class="log-info-{{.VersionName}}"> | <span class="log-info-{{.VersionName}}"> | ||||
| <a title="滚动到底部" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||||
| <a title="{{$.i18n.Tr "repo.log_scroll_end"}}" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||||
| class="log_bottom" data-version="{{.VersionName}}"><i | class="log_bottom" data-version="{{.VersionName}}"><i | ||||
| class="icon-to-bottom"></i></a> | class="icon-to-bottom"></i></a> | ||||
| </span> | </span> | ||||
| @@ -549,12 +555,16 @@ | |||||
| </div> | </div> | ||||
| <div class="ui attached log log-scroll" id="log{{.VersionName}}" data-version="{{.VersionName}}" | <div class="ui attached log log-scroll" id="log{{.VersionName}}" data-version="{{.VersionName}}" | ||||
| style="height: 300px !important; overflow: auto;"> | style="height: 300px !important; overflow: auto;"> | ||||
| <div class="ui inverted active dimmer"> | |||||
| <div class="ui loader"></div> | |||||
| </div> | |||||
| <input type="hidden" name="end_line" value> | |||||
| <input type="hidden" name="start_line" value> | |||||
| <pre id="log_file{{.VersionName}}"></pre> | |||||
| <div class="ui inverted active dimmer"> | |||||
| <div class="ui loader"></div> | |||||
| </div> | |||||
| <input type="hidden" name="end_line" value> | |||||
| <input type="hidden" name="end_line-max" value> | |||||
| <input type="hidden" name="start_line-max" value> | |||||
| <input type="hidden" name="start_line-max-copy" value> | |||||
| <input type="hidden" name="start_line" value> | |||||
| <input type="hidden" name="init_log" value> | |||||
| <pre id="log_file{{.VersionName}}"></pre> | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
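The extra hidden inputs added to the log container (end_line-max, start_line-max, start_line-max-copy, init_log) read like paging cursors for the fullscreen log alongside the existing start_line/end_line pair. A minimal sketch of how incremental log loading might use such cursors, assuming jQuery; the endpoint, query parameters, and response fields (StartLine, EndLine, Lines) are assumptions for illustration only:

```javascript
// Hypothetical incremental log fetch driven by the hidden cursor inputs above.
// The endpoint, query parameters and response fields (StartLine, EndLine, Lines)
// are assumptions; only the input names come from the template.
function loadMoreLog(versionName, logUrl) {
  var $box = $("#log" + versionName);
  var endLine = $box.find("input[name=end_line]").val() || "";
  $.get(logUrl, { base_line: endLine, lines: 50 }).done(function (res) {
    $box.find("input[name=start_line]").val(res.StartLine);
    $box.find("input[name=end_line]").val(res.EndLine);
    $("#log_file" + versionName).append(res.Lines);  // append the new chunk
    $box.find(".dimmer").removeClass("active");      // hide the loading overlay
  });
}
```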
| @@ -565,6 +575,7 @@ | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| {{template "custom/max_log" .}} | |||||
| {{end}} {{template "base/paginate" .}} | {{end}} {{template "base/paginate" .}} | ||||
| </div> | </div> | ||||
| <!-- 确认模态框 --> | <!-- 确认模态框 --> | ||||
| @@ -40,7 +40,9 @@ | |||||
| <div class="repository"> | <div class="repository"> | ||||
| {{template "repo/header" .}} | {{template "repo/header" .}} | ||||
| <div class="ui container"> | <div class="ui container"> | ||||
| {{template "base/alert" .}} | |||||
| {{if eq .NotStopTaskCount 0}} | |||||
| {{template "base/alert" .}} | |||||
| {{end}} | |||||
| {{template "custom/alert_cb" .}} | {{template "custom/alert_cb" .}} | ||||
| <div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-queue="{{.QueuesDetail}}" data-queue-start="{{.i18n.Tr "repo.wait_count_start"}}" data-queue-end="{{.i18n.Tr "repo.wait_count_end"}}"></div> | <div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-queue="{{.QueuesDetail}}" data-queue-start="{{.i18n.Tr "repo.wait_count_start"}}" data-queue-end="{{.i18n.Tr "repo.wait_count_end"}}"></div> | ||||
| <h4 class="ui top attached header"> | <h4 class="ui top attached header"> | ||||
| @@ -272,7 +274,7 @@ | |||||
| <!-- 表单操作 --> | <!-- 表单操作 --> | ||||
| <div class="inline min_title field"> | <div class="inline min_title field"> | ||||
| <label class="label-fix-width" style="font-weight: normal;"></label> | <label class="label-fix-width" style="font-weight: normal;"></label> | ||||
| <button class="ui create_train_job green button"> | |||||
| <button class="ui create_train_job green button {{if eq .NotStopTaskCount 1}}disabled{{end}}"> | |||||
| {{.i18n.Tr "repo.cloudbrain.new"}} | {{.i18n.Tr "repo.cloudbrain.new"}} | ||||
| </button> | </button> | ||||
| <a class="ui button" href="{{.RepoLink}}/modelarts/inference-job">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | <a class="ui button" href="{{.RepoLink}}/modelarts/inference-job">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | ||||
| @@ -315,7 +315,7 @@ | |||||
| <td class="ti-text-form-content"> | <td class="ti-text-form-content"> | ||||
| <div class="text-span text-span-w"> | <div class="text-span text-span-w"> | ||||
| <span style="font-size: 12px;" class=""> | |||||
| <span style="font-size: 12px;" id="{{.VersionName}}-startTime"> | |||||
| {{if not (eq .StartTime 0)}} | {{if not (eq .StartTime 0)}} | ||||
| {{TimeSinceUnix1 .StartTime}} | {{TimeSinceUnix1 .StartTime}} | ||||
| {{else}} | {{else}} | ||||
| @@ -535,23 +535,29 @@ | |||||
| </div> | </div> | ||||
| <div class="ui tab" data-tab="third"> | <div class="ui tab" data-tab="third"> | ||||
| <div> | |||||
| <div class="file-info"> | |||||
| <a id="{{.VersionName}}-log-down" | <a id="{{.VersionName}}-log-down" | ||||
| class='{{if $.canDownload}}ti-download-file{{else}}disabled{{end}}' | |||||
| class='{{if $.canDownload}}ti-download-file{{else}}disabled{{end}} file-info' | |||||
| href="/api/v1/repos/{{$.RepoRelPath}}/cloudbrain/{{.ID}}/download_log_file"> | href="/api/v1/repos/{{$.RepoRelPath}}/cloudbrain/{{.ID}}/download_log_file"> | ||||
| <i class="ri-download-cloud-2-line"></i> | <i class="ri-download-cloud-2-line"></i> | ||||
| <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | ||||
| </a> | </a> | ||||
| <div class="file-info full-log-dialog" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" | |||||
| data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;" | |||||
| data-log-down="{{$.i18n.Tr "repo.modelarts.download_log"}}" data-href="/api/v1/repos/{{$.RepoRelPath}}/cloudbrain/{{.ID}}/download_log_file" | |||||
| data-scroll-top="{{$.i18n.Tr "repo.log_scroll_start"}}" data-scroll-bottom="{{$.i18n.Tr "repo.log_scroll_end"}}"> | |||||
| <i class="ri-aspect-ratio-line"></i> | |||||
| <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.fullscreen_log_file"}}</span> | |||||
| </div> | |||||
| </div> | </div> | ||||
| <div | <div | ||||
| style="position: relative;border: 1px solid rgba(0,0,0,.2);padding: 0 10px;margin-top: 10px;"> | style="position: relative;border: 1px solid rgba(0,0,0,.2);padding: 0 10px;margin-top: 10px;"> | ||||
| <span> | <span> | ||||
| <a title="滚动到顶部" style="position: absolute; right: -32px;cursor: pointer;" | |||||
| <a title="{{$.i18n.Tr "repo.log_scroll_start"}}" style="position: absolute; right: -32px;cursor: pointer;" | |||||
| class="log_top" data-version="{{.VersionName}}"><i class="icon-to-top"></i></a> | class="log_top" data-version="{{.VersionName}}"><i class="icon-to-top"></i></a> | ||||
| </span> | </span> | ||||
| <span class="log-info-{{.VersionName}}"> | <span class="log-info-{{.VersionName}}"> | ||||
| <a title="滚动到底部" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||||
| <a title="{{$.i18n.Tr "repo.log_scroll_end"}}" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||||
| class="log_bottom" data-version="{{.VersionName}}"><i | class="log_bottom" data-version="{{.VersionName}}"><i | ||||
| class="icon-to-bottom"></i></a> | class="icon-to-bottom"></i></a> | ||||
| </span> | </span> | ||||
| @@ -559,13 +565,17 @@ | |||||
| <div id="header"></div> | <div id="header"></div> | ||||
| </div> | </div> | ||||
| <div class="ui attached log log-scroll" id="log{{.VersionName}}" data-version="{{.VersionName}}" | <div class="ui attached log log-scroll" id="log{{.VersionName}}" data-version="{{.VersionName}}" | ||||
| style="height: 300px !important; overflow: auto;"> | |||||
| <div class="ui inverted active dimmer"> | |||||
| style="height: 300px !important; overflow: auto;"> | |||||
| <div class="ui inverted active dimmer"> | |||||
| <div class="ui loader"></div> | <div class="ui loader"></div> | ||||
| </div> | |||||
| <input type="hidden" name="end_line" value> | |||||
| <input type="hidden" name="start_line" value> | |||||
| <pre id="log_file{{.VersionName}}"></pre> | |||||
| </div> | |||||
| <input type="hidden" name="end_line" value> | |||||
| <input type="hidden" name="end_line-max" value> | |||||
| <input type="hidden" name="start_line-max" value> | |||||
| <input type="hidden" name="start_line-max-copy" value> | |||||
| <input type="hidden" name="start_line" value> | |||||
| <input type="hidden" name="init_log" value> | |||||
| <pre id="log_file{{.VersionName}}"></pre> | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| @@ -589,7 +599,7 @@ | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| {{template "custom/max_log" .}} | |||||
| {{end}} | {{end}} | ||||
| </div> | </div> | ||||
| @@ -24,7 +24,9 @@ | |||||
| <div class="repository new repo ui middle very relaxed page grid"> | <div class="repository new repo ui middle very relaxed page grid"> | ||||
| <div class="column"> | <div class="column"> | ||||
| <div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-queue="{{.QueuesDetail}}" data-queue-start="{{.i18n.Tr "repo.wait_count_start"}}" data-queue-end="{{.i18n.Tr "repo.wait_count_end"}}"></div> | <div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-queue="{{.QueuesDetail}}" data-queue-start="{{.i18n.Tr "repo.wait_count_start"}}" data-queue-end="{{.i18n.Tr "repo.wait_count_end"}}"></div> | ||||
| {{template "base/alert" .}} | |||||
| {{if eq .NotStopTaskCount 0}} | |||||
| {{template "base/alert" .}} | |||||
| {{end}} | |||||
| <div class="ui negative message" id="messageInfo" style="display:none;"> | <div class="ui negative message" id="messageInfo" style="display:none;"> | ||||
| <p></p> | <p></p> | ||||
| </div> | </div> | ||||
| @@ -200,7 +202,7 @@ | |||||
| <div class="inline field"> | <div class="inline field"> | ||||
| <label></label> | <label></label> | ||||
| <button class="ui green button"> | |||||
| <button class="ui green button {{if eq .NotStopTaskCount 1}}disabled{{end}}"> | |||||
| {{.i18n.Tr "repo.cloudbrain.new"}} | {{.i18n.Tr "repo.cloudbrain.new"}} | ||||
| </button> | </button> | ||||
| <a class="ui button cancel" | <a class="ui button cancel" | ||||
| @@ -71,7 +71,9 @@ | |||||
| {{template "repo/header" .}} | {{template "repo/header" .}} | ||||
| <div class="ui container"> | <div class="ui container"> | ||||
| <div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}" data-queue="{{.QueuesDetail}}" data-queue-start="{{.i18n.Tr "repo.wait_count_start"}}" data-queue-end="{{.i18n.Tr "repo.wait_count_end"}}"></div> | <div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}" data-queue="{{.QueuesDetail}}" data-queue-start="{{.i18n.Tr "repo.wait_count_start"}}" data-queue-end="{{.i18n.Tr "repo.wait_count_end"}}"></div> | ||||
| {{template "base/alert" .}} | |||||
| {{if eq .NotStopTaskCount 0}} | |||||
| {{template "base/alert" .}} | |||||
| {{end}} | |||||
| {{template "custom/alert_cb" .}} | {{template "custom/alert_cb" .}} | ||||
| <h4 class="ui top attached header"> | <h4 class="ui top attached header"> | ||||
| {{.i18n.Tr "repo.modelarts.train_job.new"}} | {{.i18n.Tr "repo.modelarts.train_job.new"}} | ||||
| @@ -232,7 +234,7 @@ | |||||
| <div class="inline field" style="padding: 1rem 0;"> | <div class="inline field" style="padding: 1rem 0;"> | ||||
| <label class="label-fix-width"></label> | <label class="label-fix-width"></label> | ||||
| <button class="ui create_train_job green button"> | |||||
| <button class="ui create_train_job green button {{if eq .NotStopTaskCount 1}}disabled{{end}}"> | |||||
| {{.i18n.Tr "repo.cloudbrain.new"}} | {{.i18n.Tr "repo.cloudbrain.new"}} | ||||
| </button> | </button> | ||||
| <a class="ui button __btn-cancel-back__" | <a class="ui button __btn-cancel-back__" | ||||
| @@ -331,7 +331,7 @@ | |||||
| <td class="ti-text-form-content"> | <td class="ti-text-form-content"> | ||||
| <div class="text-span text-span-w"> | <div class="text-span text-span-w"> | ||||
| <span style="font-size: 12px;"> | |||||
| <span style="font-size: 12px;" id="{{.VersionName}}-startTime"> | |||||
| {{if not (eq .StartTime 0)}} | {{if not (eq .StartTime 0)}} | ||||
| {{TimeSinceUnix1 .StartTime}} | {{TimeSinceUnix1 .StartTime}} | ||||
| {{else}} | {{else}} | ||||
| @@ -524,22 +524,28 @@ | |||||
| </div> | </div> | ||||
| <div class="ui tab" data-tab="third{{$k}}"> | <div class="ui tab" data-tab="third{{$k}}"> | ||||
| <div> | |||||
| <div class="file-info"> | |||||
| <a id="{{.VersionName}}-log-down" | <a id="{{.VersionName}}-log-down" | ||||
| class='{{if $.canDownload}}ti-download-file{{else}}disabled{{end}}' | |||||
| class='{{if $.canDownload}}ti-download-file{{else}}disabled{{end}} file-info' | |||||
| href="/api/v1/repos/{{$.RepoRelPath}}/cloudbrain/{{.ID}}/download_log_file"> | href="/api/v1/repos/{{$.RepoRelPath}}/cloudbrain/{{.ID}}/download_log_file"> | ||||
| <i class="ri-download-cloud-2-line"></i> | <i class="ri-download-cloud-2-line"></i> | ||||
| <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | ||||
| </a> | </a> | ||||
| <div class="file-info full-log-dialog" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" | |||||
| data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;" | |||||
| data-log-down="{{$.i18n.Tr "repo.modelarts.download_log"}}" data-href="/api/v1/repos/{{$.RepoRelPath}}/cloudbrain/{{.ID}}/download_log_file" | |||||
| data-scroll-top="{{$.i18n.Tr "repo.log_scroll_start"}}" data-scroll-bottom="{{$.i18n.Tr "repo.log_scroll_end"}}"> | |||||
| <i class="ri-aspect-ratio-line"></i> | |||||
| <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.fullscreen_log_file"}}</span> | |||||
| </div> | |||||
| </div> | </div> | ||||
| <div style="position: relative;border: 1px solid rgba(0,0,0,.2);padding: 0 10px;margin-top: 10px;"> | <div style="position: relative;border: 1px solid rgba(0,0,0,.2);padding: 0 10px;margin-top: 10px;"> | ||||
| <span> | <span> | ||||
| <a title="滚动到顶部" style="position: absolute; right: -32px;cursor: pointer;" | |||||
| <a title="{{$.i18n.Tr "repo.log_scroll_start"}}" style="position: absolute; right: -32px;cursor: pointer;" | |||||
| class="log_top" data-version="{{.VersionName}}"><i class="icon-to-top"></i></a> | class="log_top" data-version="{{.VersionName}}"><i class="icon-to-top"></i></a> | ||||
| </span> | </span> | ||||
| <span class="log-info-{{.VersionName}}"> | <span class="log-info-{{.VersionName}}"> | ||||
| <a title="滚动到底部" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||||
| <a title="{{$.i18n.Tr "repo.log_scroll_end"}}" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||||
| class="log_bottom" data-version="{{.VersionName}}"><i | class="log_bottom" data-version="{{.VersionName}}"><i | ||||
| class="icon-to-bottom"></i></a> | class="icon-to-bottom"></i></a> | ||||
| </span> | </span> | ||||
| @@ -552,7 +558,11 @@ | |||||
| <div class="ui loader"></div> | <div class="ui loader"></div> | ||||
| </div> | </div> | ||||
| <input type="hidden" name="end_line" value> | <input type="hidden" name="end_line" value> | ||||
| <input type="hidden" name="end_line-max" value> | |||||
| <input type="hidden" name="start_line-max" value> | |||||
| <input type="hidden" name="start_line-max-copy" value> | |||||
| <input type="hidden" name="start_line" value> | <input type="hidden" name="start_line" value> | ||||
| <input type="hidden" name="init_log" value> | |||||
| <pre id="log_file{{.VersionName}}"></pre> | <pre id="log_file{{.VersionName}}"></pre> | ||||
| </div> | </div> | ||||
| @@ -581,6 +591,7 @@ | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| {{template "custom/max_log" .}} | |||||
| {{end}} {{template "base/paginate" .}} | {{end}} {{template "base/paginate" .}} | ||||
| </div> | </div> | ||||
| <!-- 确认模态框 --> | <!-- 确认模态框 --> | ||||
| @@ -419,6 +419,18 @@ | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| <div class="ui modal debug-again-alert"> | |||||
| <div class="ui message" style="background-color: rgba(242, 113, 28, 0.05);border: 1px solid rgba(242, 113, 28, 1);border-radius: 5px;"> | |||||
| <div style="display: flex;align-items: center;"> | |||||
| <i class="ri-information-line" style="font-size: 35px;color: rgba(242, 113, 28, 1);;"></i> | |||||
| <div style="text-align: left;margin-left: 1rem;"> | |||||
| <div style="font-weight: 600;line-height: 2;">{{.i18n.Tr "repo.cloudbrain.morethanonejob1" | Safe }}</div> | |||||
| <div style="color:#939393">{{.i18n.Tr "repo.cloudbrain.morethanonejob2" | Safe}}</div> | |||||
| </div> | |||||
| </div> | |||||
| </div> | |||||
| </div> | |||||
| {{template "base/footer" .}} | {{template "base/footer" .}} | ||||
| <script> | <script> | ||||
| // 调试和评分新开窗口 | // 调试和评分新开窗口 | ||||
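The debug-again-alert modal added above carries the same "same-type task already waiting or running" warning, so the list page presumably shows it when a user retries a debug task while another one is still active. A hedged sketch of such a guard, assuming jQuery; the trigger selector and the flag name are hypothetical, not part of this change:

```javascript
// Hypothetical guard for retrying a debug task: if another task of the same
// type is still queued or running, show the alert modal added above instead
// of submitting. The trigger selector and the flag name are assumed.
$(document).on("click", ".debug-again-button", function (e) {
  if (window.notStopTaskCount > 0) {  // assumed flag exposed by the page
    e.preventDefault();
    $(".ui.modal.debug-again-alert").modal("show");
  }
});
```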
| @@ -63,7 +63,9 @@ | |||||
| {{template "repo/header" .}} | {{template "repo/header" .}} | ||||
| <div class="ui container"> | <div class="ui container"> | ||||
| <div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}"></div> | <div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}"></div> | ||||
| {{template "base/alert" .}} | |||||
| {{if eq .NotStopTaskCount 0}} | |||||
| {{template "base/alert" .}} | |||||
| {{end}} | |||||
| {{template "custom/alert_cb" .}} | {{template "custom/alert_cb" .}} | ||||
| <h4 class="ui top attached header"> | <h4 class="ui top attached header"> | ||||
| {{.i18n.Tr "repo.modelarts.train_job.new"}} | {{.i18n.Tr "repo.modelarts.train_job.new"}} | ||||
| @@ -205,7 +207,7 @@ | |||||
| <div class="inline min_title field"> | <div class="inline min_title field"> | ||||
| <label class="label-fix-width"></label> | <label class="label-fix-width"></label> | ||||
| <button class="ui create_train_job green button"> | |||||
| <button class="ui create_train_job green button {{if eq .NotStopTaskCount 1}}disabled{{end}}"> | |||||
| {{.i18n.Tr "repo.cloudbrain.new"}} | {{.i18n.Tr "repo.cloudbrain.new"}} | ||||
| </button> | </button> | ||||
| <a class="ui button __btn-cancel-back__" href="{{.RepoLink}}/modelarts/train-job">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | <a class="ui button __btn-cancel-back__" href="{{.RepoLink}}/modelarts/train-job">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | ||||
| @@ -58,7 +58,9 @@ | |||||
| {{template "repo/header" .}} | {{template "repo/header" .}} | ||||
| <div class="ui container"> | <div class="ui container"> | ||||
| <div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}"></div> | <div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}"></div> | ||||
| {{template "base/alert" .}} | |||||
| {{if eq .NotStopTaskCount 0}} | |||||
| {{template "base/alert" .}} | |||||
| {{end}} | |||||
| {{template "custom/alert_cb" .}} | {{template "custom/alert_cb" .}} | ||||
| <h4 class="ui top attached header"> | <h4 class="ui top attached header"> | ||||
| {{.i18n.Tr "repo.modelarts.train_job.new"}} | {{.i18n.Tr "repo.modelarts.train_job.new"}} | ||||
| @@ -229,7 +231,7 @@ | |||||
| <div class="inline min_title field"> | <div class="inline min_title field"> | ||||
| <label class="label-fix-width"></label> | <label class="label-fix-width"></label> | ||||
| <button class="ui create_train_job green button"> | |||||
| <button class="ui create_train_job green button {{if eq .NotStopTaskCount 1}}disabled{{end}}"> | |||||
| {{.i18n.Tr "repo.cloudbrain.new"}} | {{.i18n.Tr "repo.cloudbrain.new"}} | ||||
| </button> | </button> | ||||
| <a class="ui button __btn-cancel-back__" href="{{.RepoLink}}/modelarts/train-job">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | <a class="ui button __btn-cancel-back__" href="{{.RepoLink}}/modelarts/train-job">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | ||||
| @@ -330,7 +330,7 @@ | |||||
| <td class="ti-text-form-content"> | <td class="ti-text-form-content"> | ||||
| <div class="text-span text-span-w"> | <div class="text-span text-span-w"> | ||||
| <span style="font-size: 12px;" class=""> | |||||
| <span style="font-size: 12px;" id="{{.VersionName}}-startTime"> | |||||
| {{if not (eq .StartTime 0)}} | {{if not (eq .StartTime 0)}} | ||||
| {{TimeSinceUnix1 .StartTime}} | {{TimeSinceUnix1 .StartTime}} | ||||
| {{else}} | {{else}} | ||||
| @@ -520,14 +520,20 @@ | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| <div class="ui tab" data-tab="second{{$k}}"> | <div class="ui tab" data-tab="second{{$k}}"> | ||||
| <div> | |||||
| <div class="file-info"> | |||||
| <a id="{{.VersionName}}-log-down" | <a id="{{.VersionName}}-log-down" | ||||
| class='{{if $.canDownload}}ti-download-file{{else}}disabled{{end}}' | |||||
| class='{{if $.canDownload}}ti-download-file{{else}}disabled{{end}} file-info' | |||||
| href="/api/v1/repos/{{$.RepoRelPath}}/grampus/train-job/{{.JobID}}/download_log"> | href="/api/v1/repos/{{$.RepoRelPath}}/grampus/train-job/{{.JobID}}/download_log"> | ||||
| <i class="ri-download-cloud-2-line"></i> | <i class="ri-download-cloud-2-line"></i> | ||||
| <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | ||||
| </a> | </a> | ||||
| <div class="file-info full-log-dialog" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" data-log-type="c2Net" | |||||
| data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;" | |||||
| data-log-down="{{$.i18n.Tr "repo.modelarts.download_log"}}" data-href="/api/v1/repos/{{$.RepoRelPath}}/grampus/train-job/{{.JobID}}/download_log" | |||||
| data-scroll-top="{{$.i18n.Tr "repo.log_scroll_start"}}" data-scroll-bottom="{{$.i18n.Tr "repo.log_scroll_end"}}"> | |||||
| <i class="ri-aspect-ratio-line"></i> | |||||
| <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.fullscreen_log_file"}}</span> | |||||
| </div> | |||||
| </div> | </div> | ||||
| <div style="position: relative;border: 1px solid rgba(0,0,0,.2);padding: 0 10px;margin-top: 10px;"> | <div style="position: relative;border: 1px solid rgba(0,0,0,.2);padding: 0 10px;margin-top: 10px;"> | ||||
| <span> | <span> | ||||
| @@ -548,7 +554,11 @@ | |||||
| <div class="ui loader"></div> | <div class="ui loader"></div> | ||||
| </div> | </div> | ||||
| <input type="hidden" name="end_line" value> | <input type="hidden" name="end_line" value> | ||||
| <input type="hidden" name="start_line" value> | |||||
| <input type="hidden" name="end_line-max" value> | |||||
| <input type="hidden" name="start_line-max" value> | |||||
| <input type="hidden" name="start_line-max-copy" value> | |||||
| <input type="hidden" name="start_line" value> | |||||
| <input type="hidden" name="init_log" value> | |||||
| <pre id="log_file{{.VersionName}}"></pre> | <pre id="log_file{{.VersionName}}"></pre> | ||||
| </div> | </div> | ||||
| @@ -576,6 +586,7 @@ | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| {{template "custom/max_log" .}} | |||||
| {{end}} {{template "base/paginate" .}} | {{end}} {{template "base/paginate" .}} | ||||
| </div> | </div> | ||||
| <!-- 确认模态框 --> | <!-- 确认模态框 --> | ||||
| @@ -39,7 +39,9 @@ | |||||
| <div class="repository"> | <div class="repository"> | ||||
| {{template "repo/header" .}} | {{template "repo/header" .}} | ||||
| <div class="ui container"> | <div class="ui container"> | ||||
| {{template "base/alert" .}} | |||||
| {{if eq .NotStopTaskCount 0}} | |||||
| {{template "base/alert" .}} | |||||
| {{end}} | |||||
| {{template "custom/alert_cb" .}} | {{template "custom/alert_cb" .}} | ||||
| <div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="1" data-repo-link="{{.RepoLink}}"></div> | <div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="1" data-repo-link="{{.RepoLink}}"></div> | ||||
| <h4 class="ui top attached header"> | <h4 class="ui top attached header"> | ||||
| @@ -300,7 +302,7 @@ | |||||
| <!-- 表单操作 --> | <!-- 表单操作 --> | ||||
| <div class="inline min_title field"> | <div class="inline min_title field"> | ||||
| <label class="label-fix-width" style="font-weight: normal;"></label> | <label class="label-fix-width" style="font-weight: normal;"></label> | ||||
| <button class="ui create_train_job green button"> | |||||
| <button class="ui create_train_job green button {{if eq .NotStopTaskCount 1}}disabled{{end}}"> | |||||
| {{.i18n.Tr "repo.cloudbrain.new"}} | {{.i18n.Tr "repo.cloudbrain.new"}} | ||||
| </button> | </button> | ||||
| <a class="ui button" href="/">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | <a class="ui button" href="/">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | ||||
| @@ -263,7 +263,7 @@ td, th { | |||||
| <td class="ti-text-form-content"> | <td class="ti-text-form-content"> | ||||
| <div class="text-span text-span-w"> | <div class="text-span text-span-w"> | ||||
| <span style="font-size: 12px;" class=""> | |||||
| <span style="font-size: 12px;" id="{{.VersionName}}-startTime"> | |||||
| {{if not (eq .StartTime 0)}} | {{if not (eq .StartTime 0)}} | ||||
| {{TimeSinceUnix1 .StartTime}} | {{TimeSinceUnix1 .StartTime}} | ||||
| {{else}} | {{else}} | ||||
| @@ -460,22 +460,28 @@ td, th { | |||||
| </div> | </div> | ||||
| <div class="ui tab" data-tab="second"> | <div class="ui tab" data-tab="second"> | ||||
| <div> | |||||
| <div class="file-info"> | |||||
| <a id="{{.VersionName}}-log-down" | <a id="{{.VersionName}}-log-down" | ||||
| class='{{if and ($.canDownload) (eq .Status "KILLED" "FAILED" "START_FAILED" "STOPPED" "COMPLETED") }}ti-download-file{{else}}disabled{{end}}' | |||||
| class='{{if and ($.canDownload) (eq .Status "KILLED" "FAILED" "START_FAILED" "STOPPED" "COMPLETED") }}ti-download-file{{else}}disabled{{end}} file-info' | |||||
| href="{{$.RepoLink}}/modelarts/train-job/{{.JobID}}/download_log_file?version_name={{.VersionName}}"> | href="{{$.RepoLink}}/modelarts/train-job/{{.JobID}}/download_log_file?version_name={{.VersionName}}"> | ||||
| <i class="ri-download-cloud-2-line"></i> | <i class="ri-download-cloud-2-line"></i> | ||||
| <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | ||||
| </a> | </a> | ||||
| <div class="file-info full-log-dialog" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" | |||||
| data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;" | |||||
| data-log-down="{{$.i18n.Tr "repo.modelarts.download_log"}}" data-href="{{$.RepoLink}}/modelarts/train-job/{{.JobID}}/download_log_file?version_name={{.VersionName}}" | |||||
| data-scroll-top="{{$.i18n.Tr "repo.log_scroll_start"}}" data-scroll-bottom="{{$.i18n.Tr "repo.log_scroll_end"}}"> | |||||
| <i class="ri-aspect-ratio-line"></i> | |||||
| <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.fullscreen_log_file"}}</span> | |||||
| </div> | |||||
| </div> | </div> | ||||
| <div style="position: relative;border: 1px solid rgba(0,0,0,.2);padding: 0 10px;margin-top: 10px;"> | <div style="position: relative;border: 1px solid rgba(0,0,0,.2);padding: 0 10px;margin-top: 10px;"> | ||||
| <span> | <span> | ||||
| <a title="滚动到顶部" style="position: absolute; right: -32px;cursor: pointer;" | |||||
| <a title="{{$.i18n.Tr "repo.log_scroll_start"}}" style="position: absolute; right: -32px;cursor: pointer;" | |||||
| class="log_top" data-version="{{.VersionName}}"><i class="icon-to-top"></i></a> | class="log_top" data-version="{{.VersionName}}"><i class="icon-to-top"></i></a> | ||||
| </span> | </span> | ||||
| <span class="log-info-{{.VersionName}}"> | <span class="log-info-{{.VersionName}}"> | ||||
| <a title="滚动到底部" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||||
| <a title="{{$.i18n.Tr "repo.log_scroll_end"}}" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||||
| class="log_bottom" data-version="{{.VersionName}}"><i | class="log_bottom" data-version="{{.VersionName}}"><i | ||||
| class="icon-to-bottom"></i></a> | class="icon-to-bottom"></i></a> | ||||
| </span> | </span> | ||||
| @@ -486,8 +492,12 @@ td, th { | |||||
| <div class="ui inverted active dimmer"> | <div class="ui inverted active dimmer"> | ||||
| <div class="ui loader"></div> | <div class="ui loader"></div> | ||||
| </div> | </div> | ||||
| <input type="hidden" name="end_line" value> | |||||
| <input type="hidden" name="start_line" value> | |||||
| <input type="hidden" name="end_line" value> | |||||
| <input type="hidden" name="end_line-max" value> | |||||
| <input type="hidden" name="start_line-max" value> | |||||
| <input type="hidden" name="start_line-max-copy" value> | |||||
| <input type="hidden" name="start_line" value> | |||||
| <input type="hidden" name="init_log" value> | |||||
| <pre id="log_file{{.VersionName}}"></pre> | <pre id="log_file{{.VersionName}}"></pre> | ||||
| </div> | </div> | ||||
| @@ -510,12 +520,8 @@ td, th { | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| {{template "custom/max_log" .}} | |||||
| {{end}} | {{end}} | ||||
| </div> | </div> | ||||
| <!-- 确认模态框 --> | <!-- 确认模态框 --> | ||||
| </div> | </div> | ||||
| @@ -11,11 +11,12 @@ | |||||
| <div class="repository new repo ui middle very relaxed page grid"> | <div class="repository new repo ui middle very relaxed page grid"> | ||||
| <div class="column"> | <div class="column"> | ||||
| <div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}"></div> | <div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}"></div> | ||||
| {{template "base/alert" .}} | |||||
| <div class="ui negative message" id="messageInfo"> | |||||
| {{if eq .NotStopTaskCount 0}} | |||||
| {{template "base/alert" .}} | |||||
| {{end}} | |||||
| <div class="ui negative message" id="messageInfo" style="display: none;"> | |||||
| <p></p> | <p></p> | ||||
| </div> | </div> | ||||
| {{template "custom/alert_cb" .}} | {{template "custom/alert_cb" .}} | ||||
| <form class="ui form" id="form_id" action="{{.Link}}" method="post"> | <form class="ui form" id="form_id" action="{{.Link}}" method="post"> | ||||
| {{.CsrfTokenHtml}} | {{.CsrfTokenHtml}} | ||||
| @@ -106,7 +107,7 @@ | |||||
| </div> | </div> | ||||
| <div class="inline field"> | <div class="inline field"> | ||||
| <label></label> | <label></label> | ||||
| <button class="ui green button"> | |||||
| <button class="ui green button {{if eq .NotStopTaskCount 1}}disabled{{end}}"> | |||||
| {{.i18n.Tr "repo.cloudbrain.new"}} | {{.i18n.Tr "repo.cloudbrain.new"}} | ||||
| </button> | </button> | ||||
| <a class="ui button cancel" href="{{.RepoLink}}/debugjob?debugListType=all">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | <a class="ui button cancel" href="{{.RepoLink}}/debugjob?debugListType=all">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | ||||
| @@ -63,7 +63,9 @@ | |||||
| {{template "repo/header" .}} | {{template "repo/header" .}} | ||||
| <div class="ui container"> | <div class="ui container"> | ||||
| <div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}"></div> | <div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}"></div> | ||||
| {{template "base/alert" .}} | |||||
| {{if eq .NotStopTaskCount 0}} | |||||
| {{template "base/alert" .}} | |||||
| {{end}} | |||||
| {{template "custom/alert_cb" .}} | {{template "custom/alert_cb" .}} | ||||
| <h4 class="ui top attached header"> | <h4 class="ui top attached header"> | ||||
| {{.i18n.Tr "repo.modelarts.train_job.new"}} | {{.i18n.Tr "repo.modelarts.train_job.new"}} | ||||
| @@ -291,7 +293,7 @@ | |||||
| <div class="inline field" style="padding: 1rem 0;"> | <div class="inline field" style="padding: 1rem 0;"> | ||||
| <label class="label-fix-width"></label> | <label class="label-fix-width"></label> | ||||
| <button class="ui create_train_job green button"> | |||||
| <button class="ui create_train_job green button {{if eq .NotStopTaskCount 1}}disabled{{end}}"> | |||||
| {{.i18n.Tr "repo.cloudbrain.new"}} | {{.i18n.Tr "repo.cloudbrain.new"}} | ||||
| </button> | </button> | ||||
| <a class="ui button" href="{{.RepoLink}}/modelarts/train-job">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | <a class="ui button" href="{{.RepoLink}}/modelarts/train-job">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | ||||
| @@ -370,7 +370,7 @@ | |||||
| <td class="ti-text-form-content"> | <td class="ti-text-form-content"> | ||||
| <div class="text-span text-span-w"> | <div class="text-span text-span-w"> | ||||
| <span style="font-size: 12px;" class=""> | |||||
| <span style="font-size: 12px;" id="{{.VersionName}}-startTime"> | |||||
| {{if not (eq .Cloudbrain.StartTime 0)}} | {{if not (eq .Cloudbrain.StartTime 0)}} | ||||
| {{TimeSinceUnix1 .Cloudbrain.StartTime}} | {{TimeSinceUnix1 .Cloudbrain.StartTime}} | ||||
| {{else}} | {{else}} | ||||
| @@ -548,14 +548,20 @@ | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| <div class="ui tab" data-tab="second{{$k}}"> | <div class="ui tab" data-tab="second{{$k}}"> | ||||
| <div> | |||||
| <div class="file-info"> | |||||
| <a id="{{.VersionName}}-log-down" | <a id="{{.VersionName}}-log-down" | ||||
| class='{{if and (.CanModify) (eq .Status "KILLED" "FAILED" "START_FAILED" "STOPPED" "COMPLETED") }}ti-download-file{{else}}disabled{{end}}' | |||||
| class='{{if and (.CanModify) (eq .Status "KILLED" "FAILED" "START_FAILED" "STOPPED" "COMPLETED") }}ti-download-file{{else}}disabled{{end}} file-info' | |||||
| href="{{$.RepoLink}}/modelarts/train-job/{{.JobID}}/download_log_file?version_name={{.VersionName}}"> | href="{{$.RepoLink}}/modelarts/train-job/{{.JobID}}/download_log_file?version_name={{.VersionName}}"> | ||||
| <i class="ri-download-cloud-2-line"></i> | <i class="ri-download-cloud-2-line"></i> | ||||
| <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | ||||
| </a> | </a> | ||||
| <div class="file-info full-log-dialog" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" | |||||
| data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;" | |||||
| data-log-down="{{$.i18n.Tr "repo.modelarts.download_log"}}" data-href="{{$.RepoLink}}/modelarts/train-job/{{.JobID}}/download_log_file?version_name={{.VersionName}}" | |||||
| data-scroll-top="{{$.i18n.Tr "repo.log_scroll_start"}}" data-scroll-bottom="{{$.i18n.Tr "repo.log_scroll_end"}}"> | |||||
| <i class="ri-aspect-ratio-line"></i> | |||||
| <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.fullscreen_log_file"}}</span> | |||||
| </div> | |||||
| </div> | </div> | ||||
| <div | <div | ||||
| style="position: relative;border: 1px solid rgba(0,0,0,.2);padding: 0 10px;margin-top: 10px;"> | style="position: relative;border: 1px solid rgba(0,0,0,.2);padding: 0 10px;margin-top: 10px;"> | ||||
| @@ -577,7 +583,11 @@ | |||||
| <div class="ui loader"></div> | <div class="ui loader"></div> | ||||
| </div> | </div> | ||||
| <input type="hidden" name="end_line" value> | <input type="hidden" name="end_line" value> | ||||
| <input type="hidden" name="end_line-max" value> | |||||
| <input type="hidden" name="start_line-max" value> | |||||
| <input type="hidden" name="start_line-max-copy" value> | |||||
| <input type="hidden" name="start_line" value> | <input type="hidden" name="start_line" value> | ||||
| <input type="hidden" name="init_log" value> | |||||
| <pre id="log_file{{.VersionName}}"></pre> | <pre id="log_file{{.VersionName}}"></pre> | ||||
| </div> | </div> | ||||
| @@ -609,6 +619,7 @@ | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| {{template "custom/max_log" .}} | |||||
| {{end}} {{template "base/paginate" .}} | {{end}} {{template "base/paginate" .}} | ||||
| </div> | </div> | ||||
| <!-- 确认模态框 --> | <!-- 确认模态框 --> | ||||
| @@ -715,6 +726,7 @@ | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| {{template "base/footer" .}} | {{template "base/footer" .}} | ||||
| @@ -56,7 +56,10 @@ | |||||
| {{template "repo/header" .}} | {{template "repo/header" .}} | ||||
| <div class="ui container"> | <div class="ui container"> | ||||
| <div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}"></div> | <div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}"></div> | ||||
| {{template "base/alert" .}} | |||||
| {{if eq .NotStopTaskCount 0}} | |||||
| {{template "base/alert" .}} | |||||
| {{end}} | |||||
| {{template "custom/alert_cb" .}} | |||||
| <h4 class="ui top attached header"> | <h4 class="ui top attached header"> | ||||
| {{.i18n.Tr "repo.modelarts.train_job.new"}} | {{.i18n.Tr "repo.modelarts.train_job.new"}} | ||||
| </h4> | </h4> | ||||
| @@ -260,7 +263,7 @@ | |||||
| </div> | </div> | ||||
| <div class="inline unite min_title field"> | <div class="inline unite min_title field"> | ||||
| <button class="ui create_train_job green button"> | |||||
| <button class="ui create_train_job green button {{if eq .NotStopTaskCount 1}}disabled{{end}}"> | |||||
| {{.i18n.Tr "repo.cloudbrain.new"}} | {{.i18n.Tr "repo.cloudbrain.new"}} | ||||
| </button> | </button> | ||||
| <a class="ui button cancel" href="{{.RepoLink}}/modelarts/train-job">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | <a class="ui button cancel" href="{{.RepoLink}}/modelarts/train-job">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | ||||
| @@ -103,7 +103,7 @@ | |||||
| </span> | </span> | ||||
| </div> | </div> | ||||
| <div class="two wide column center padding0"> | <div class="two wide column center padding0"> | ||||
| <span style="font-size: 12px;">{{if eq .SrcEngine 0}}PyTorch {{else if eq .SrcEngine 1}}TensorFlow{{else if eq .SrcEngine 2}}MindSpore {{end}}</span> | |||||
| <span style="font-size: 12px;">{{if eq .SrcEngine 0}}PyTorch {{else if eq .SrcEngine 1}}TensorFlow {{else if eq .SrcEngine 2}}MindSpore {{else if eq .SrcEngine 4}}PaddlePaddle {{else if eq .SrcEngine 6}}MXNet {{end}}</span> | |||||
| </div> | </div> | ||||
| <div class="two wide column center padding0"> | <div class="two wide column center padding0"> | ||||
| <span style="font-size: 12px;">{{if eq .DestFormat 0}}ONNX {{else if eq .DestFormat 1}}TensorRT {{end}}</span> | <span style="font-size: 12px;">{{if eq .DestFormat 0}}ONNX {{else if eq .DestFormat 1}}TensorRT {{end}}</span> | ||||
| @@ -532,7 +532,7 @@ | |||||
| } | } | ||||
| } | } | ||||
| function isModel(filename){ | function isModel(filename){ | ||||
| var postfix=[".pth",".pkl",".onnx",".mindir",".ckpt",".pb"]; | |||||
| var postfix=[".pth",".pkl",".onnx",".mindir",".ckpt",".pb",".pdmodel",".pdiparams",".params",".json"]; | |||||
| for(var i =0; i<postfix.length;i++){ | for(var i =0; i<postfix.length;i++){ | ||||
| if(filename.substring(filename.length-postfix[i].length)==postfix[i]){ | if(filename.substring(filename.length-postfix[i].length)==postfix[i]){ | ||||
| return true; | return true; | ||||
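The suffix loop around the extended postfix list predates String.prototype.endsWith; an equivalent sketch of the same check, using the extended list added in this change, could read:

```javascript
// Equivalent suffix check using Array.some and String.endsWith; behaviour
// matches the loop above for the extended model-file suffix list.
function isModel(filename) {
  var postfix = [".pth", ".pkl", ".onnx", ".mindir", ".ckpt", ".pb",
                 ".pdmodel", ".pdiparams", ".params", ".json"];
  return postfix.some(function (ext) { return filename.endsWith(ext); });
}
```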
| @@ -568,6 +568,9 @@ | |||||
| html +="<option name=\"PyTorch\" " + getSelected(0,value) + " value=\"0\">PyTorch</option>"; | html +="<option name=\"PyTorch\" " + getSelected(0,value) + " value=\"0\">PyTorch</option>"; | ||||
| html +="<option name=\"TensorFlow\" " + getSelected(1,value) + " value=\"1\">TensorFlow</option>"; | html +="<option name=\"TensorFlow\" " + getSelected(1,value) + " value=\"1\">TensorFlow</option>"; | ||||
| html +="<option name=\"MindSpore\" " + getSelected(2,value) + " value=\"2\">MindSpore</option>"; | html +="<option name=\"MindSpore\" " + getSelected(2,value) + " value=\"2\">MindSpore</option>"; | ||||
| html +="<option name=\"PaddlePaddle\" " + getSelected(4,value) + " value=\"4\">PaddlePaddle</option>"; | |||||
| html +="<option name=\"MXNet\" " + getSelected(6,value) + " value=\"6\">MXNet</option>"; | |||||
| $('#srcEngine').html(html); | $('#srcEngine').html(html); | ||||
| srcEngineChanged(); | srcEngineChanged(); | ||||
| } | } | ||||
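The option markup above relies on a getSelected helper defined elsewhere in this template's script; judging from the call sites, it likely just emits the selected attribute when the option value matches the current engine. A hedged guess at its shape:

```javascript
// Hypothetical shape of the getSelected helper used when building the
// engine <option> list: emit the selected attribute when the option's
// value equals the currently chosen engine.
function getSelected(optionValue, currentValue) {
  return optionValue == currentValue ? "selected" : "";
}
```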
| @@ -355,7 +355,7 @@ td, th { | |||||
| <td class="ti-text-form-content"> | <td class="ti-text-form-content"> | ||||
| <div class="text-span text-span-w"> | <div class="text-span text-span-w"> | ||||
| {{if eq .SrcEngine 0}}PyTorch {{else if eq .SrcEngine 1}}Tensorflow{{else if eq .SrcEngine 2}}MindSpore {{end}} | |||||
| {{if eq .SrcEngine 0}}PyTorch {{else if eq .SrcEngine 1}}TensorFlow {{else if eq .SrcEngine 2}}MindSpore {{else if eq .SrcEngine 4}}PaddlePaddle {{else if eq .SrcEngine 6}}MXNet {{end}} | |||||
| </div> | </div> | ||||
| </td> | </td> | ||||
| </tr> | </tr> | ||||
| @@ -568,4 +568,5 @@ | |||||
| $("#choice_Engine").removeClass('disabled'); | $("#choice_Engine").removeClass('disabled'); | ||||
| } | } | ||||
| } | } | ||||
| </script> | </script> | ||||
| @@ -55,7 +55,9 @@ | |||||
| <div class="ui container"> | <div class="ui container"> | ||||
| {{$Grampus := (or (eq (index (SubJumpablePath .Link) 1) "create_grampus_gpu") (eq (index (SubJumpablePath .Link) 1) "create_grampus_npu"))}} | {{$Grampus := (or (eq (index (SubJumpablePath .Link) 1) "create_grampus_gpu") (eq (index (SubJumpablePath .Link) 1) "create_grampus_npu"))}} | ||||
| <div class="cloudbrain-type" style="display: none;" data-grampus="{{$Grampus}}" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}" data-queue="{{.QueuesDetail}}" data-queue-start="{{.i18n.Tr "repo.wait_count_start"}}" data-queue-end="{{.i18n.Tr "repo.wait_count_end"}}"></div> | <div class="cloudbrain-type" style="display: none;" data-grampus="{{$Grampus}}" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}" data-queue="{{.QueuesDetail}}" data-queue-start="{{.i18n.Tr "repo.wait_count_start"}}" data-queue-end="{{.i18n.Tr "repo.wait_count_end"}}"></div> | ||||
| {{template "base/alert" .}} | |||||
| {{if eq .NotStopTaskCount 0}} | |||||
| {{template "base/alert" .}} | |||||
| {{end}} | |||||
| {{template "custom/alert_cb" .}} | {{template "custom/alert_cb" .}} | ||||
| <h4 class="ui top attached header"> | <h4 class="ui top attached header"> | ||||
| {{.i18n.Tr "repo.modelarts.evaluate_job.new_job"}} | {{.i18n.Tr "repo.modelarts.evaluate_job.new_job"}} | ||||
| @@ -314,7 +316,7 @@ | |||||
| </div> | </div> | ||||
| <div class="inline unite min_title field"> | <div class="inline unite min_title field"> | ||||
| <label class="label-fix-width" style="font-weight: normal;"></label> | <label class="label-fix-width" style="font-weight: normal;"></label> | ||||
| <button class="ui create_train_job green button"> | |||||
| <button class="ui create_train_job green button {{if eq .NotStopTaskCount 1}}disabled{{end}}"> | |||||
| {{.i18n.Tr "repo.cloudbrain.new"}} | {{.i18n.Tr "repo.cloudbrain.new"}} | ||||
| </button> | </button> | ||||
| <a class="ui button" href="{{.RepoLink}}/cloudbrain/benchmark">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | <a class="ui button" href="{{.RepoLink}}/cloudbrain/benchmark">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | ||||
| @@ -467,22 +467,29 @@ | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| <div class="ui tab" data-tab="second0"> | <div class="ui tab" data-tab="second0"> | ||||
| <div> | |||||
| <div class="file-info"> | |||||
| <a id="-log-down" | <a id="-log-down" | ||||
| class='{{if $.canDownload}}ti-download-file{{else}}disabled{{end}}' | |||||
| class='{{if $.canDownload}}ti-download-file{{else}}disabled{{end}} file-info' | |||||
| href="/api/v1/repos/{{$.RepoRelPath}}/cloudbrain"> | href="/api/v1/repos/{{$.RepoRelPath}}/cloudbrain"> | ||||
| <i class="ri-download-cloud-2-line"></i> | <i class="ri-download-cloud-2-line"></i> | ||||
| <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | ||||
| </a> | </a> | ||||
| <div class="file-info full-log-dialog" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" | |||||
| data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;" | |||||
| data-log-down="{{$.i18n.Tr "repo.modelarts.download_log"}}" data-href="/api/v1/repos/{{$.RepoRelPath}}/cloudbrain" | |||||
| data-scroll-top="{{$.i18n.Tr "repo.log_scroll_start"}}" data-scroll-bottom="{{$.i18n.Tr "repo.log_scroll_end"}}"> | |||||
| <i class="ri-aspect-ratio-line"></i> | |||||
| <span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.fullscreen_log_file"}}</span> | |||||
| </div> | |||||
| </div> | </div> | ||||
| <div | <div | ||||
| style="position: relative;border: 1px solid rgba(0,0,0,.2);padding: 0 10px;margin-top: 10px;"> | style="position: relative;border: 1px solid rgba(0,0,0,.2);padding: 0 10px;margin-top: 10px;"> | ||||
| <span> | <span> | ||||
| <a title="滚动到顶部" style="position: absolute; right: -32px;cursor: pointer;" | |||||
| <a title="{{$.i18n.Tr "repo.log_scroll_start"}}" style="position: absolute; right: -32px;cursor: pointer;" | |||||
| class="log_top" data-version=""><i class="icon-to-top"></i></a> | class="log_top" data-version=""><i class="icon-to-top"></i></a> | ||||
| </span> | </span> | ||||
| <span class="log-info-"> | <span class="log-info-"> | ||||
| <a title="滚动到底部" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||||
| <a title="{{$.i18n.Tr "repo.log_scroll_end"}}" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||||
| class="log_bottom" data-version=""><i | class="log_bottom" data-version=""><i | ||||
| class="icon-to-bottom"></i></a> | class="icon-to-bottom"></i></a> | ||||
| </span> | </span> | ||||
| @@ -490,13 +497,17 @@ | |||||
| <div id="header"></div> | <div id="header"></div> | ||||
| </div> | </div> | ||||
| <div class="ui attached log log-scroll" id="log" data-version="" | <div class="ui attached log log-scroll" id="log" data-version="" | ||||
| style="height: 300px !important; overflow: auto;"> | |||||
| <div class="ui inverted active dimmer"> | |||||
| <div class="ui loader"></div> | |||||
| </div> | |||||
| <input type="hidden" name="end_line" value> | |||||
| <input type="hidden" name="start_line" value> | |||||
| <pre id="log_file"></pre> | |||||
| style="height: 300px !important; overflow: auto;"> | |||||
| <div class="ui inverted active dimmer"> | |||||
| <div class="ui loader"></div> | |||||
| </div> | |||||
| <input type="hidden" name="end_line" value> | |||||
| <input type="hidden" name="end_line-max" value> | |||||
| <input type="hidden" name="start_line-max" value> | |||||
| <input type="hidden" name="start_line-max-copy" value> | |||||
| <input type="hidden" name="start_line" value> | |||||
| <input type="hidden" name="init_log" value> | |||||
| <pre id="log_file"></pre> | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| @@ -571,6 +582,7 @@ | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| {{template "custom/max_log" .}} | |||||
| {{template "base/paginate" .}} | {{template "base/paginate" .}} | ||||
| </div> | </div> | ||||
| <!-- 确认模态框 --> | <!-- 确认模态框 --> | ||||
| @@ -65,7 +65,7 @@ | |||||
| <td> | <td> | ||||
| ${el.dockerDir} | ${el.dockerDir} | ||||
| </td> | </td> | ||||
| <td><a> ${el.example_repo}</a></td> | |||||
| <td><a href="${el.example_repo}">${el.example_repo}</a></td> | |||||
| <td>${el.note}</td></tr>`; | <td>${el.note}</td></tr>`; | ||||
| }); | }); | ||||
| html2 = html2.replace( | html2 = html2.replace( | ||||
| @@ -1,7 +1,7 @@ | |||||
| {{template "base/head" .}} | {{template "base/head" .}} | ||||
| <!-- 提示框 --> | <!-- 提示框 --> | ||||
| <script src="{{StaticUrlPrefix}}/js/specsuse.js?v={{MD5 AppVer}}" type="text/javascript"></script> | |||||
| <div class="alert"></div> | <div class="alert"></div> | ||||
| <script src="{{StaticUrlPrefix}}/js/specsuse.js?v={{MD5 AppVer}}" type="text/javascript"></script> | |||||
| <div class="explore users"> | <div class="explore users"> | ||||
| <div class="cloudbrain_debug" style="display: none;" data-debug="{{$.i18n.Tr "repo.debug"}}" | <div class="cloudbrain_debug" style="display: none;" data-debug="{{$.i18n.Tr "repo.debug"}}" | ||||
| data-debug-again="{{$.i18n.Tr "repo.debug_again"}}" data-debug-task="{{$.i18n.Tr "cloudbrain.DEBUG"}}" | data-debug-again="{{$.i18n.Tr "repo.debug_again"}}" data-debug-task="{{$.i18n.Tr "cloudbrain.DEBUG"}}" | ||||
| @@ -445,6 +445,18 @@ | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| <div class="ui modal debug-again-alert"> | |||||
| <div class="ui message" style="background-color: rgba(242, 113, 28, 0.05);border: 1px solid rgba(242, 113, 28, 1);border-radius: 5px;"> | |||||
| <div style="display: flex;align-items: center;"> | |||||
| <i class="ri-information-line" style="font-size: 35px;color: rgba(242, 113, 28, 1);;"></i> | |||||
| <div style="text-align: left;margin-left: 1rem;"> | |||||
| <div style="font-weight: 600;line-height: 2;">{{.i18n.Tr "repo.cloudbrain.morethanonejob1" | Safe }}</div> | |||||
| <div style="color:#939393">{{.i18n.Tr "repo.cloudbrain.morethanonejob2" | Safe}}</div> | |||||
| </div> | |||||
| </div> | |||||
| </div> | |||||
| </div> | |||||
| </div> | </div> | ||||
| <script> | <script> | ||||
| @@ -21,7 +21,7 @@ | |||||
| <div class="badge-wrap"> | <div class="badge-wrap"> | ||||
| {{range $k,$v :=.RecentBadges}} | {{range $k,$v :=.RecentBadges}} | ||||
| {{if le $k 3}} | {{if le $k 3}} | ||||
| <div class="badge-img-avatar" title="{{.Name}}"><img style="width: 100%;height: 100%;" src="{{.LightedIcon}}" class="ui poping up" data-content="{{.Name}}" data-position="top center" data-variation="tiny inverted"></div> | |||||
| <div class="badge-img-avatar"><img style="width: 100%;height: 100%;" src="{{.LightedIcon}}" class="ui poping up" data-content="{{.Name}}" data-position="top center" data-variation="tiny inverted"></div> | |||||
| {{else}} | {{else}} | ||||
| <a class="badge-more-icon" href="{{$.Owner.HomeLink}}?tab=badge"><i class="ri-more-fill"></i></a> | <a class="badge-more-icon" href="{{$.Owner.HomeLink}}?tab=badge"><i class="ri-more-fill"></i></a> | ||||
| {{end}} | {{end}} | ||||