diff --git a/BUILD.bazel b/BUILD.bazel
index 1f8a067..e5fde57 100644
--- a/BUILD.bazel
+++ b/BUILD.bazel
@@ -1,8 +1,17 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_path")
 load("@bazel_gazelle//:def.bzl", "gazelle")
 
 # gazelle:prefix k9bookshelf
 gazelle(name = "gazelle")
 
+go_path(
+    name = "gopath",
+    mode = "link",
+    deps = [
+        "//syncdata/cmd",
+    ],
+)
+
 config_setting(
     name = "darwin",
     values = {"cpu": "darwin"},
diff --git a/Makefile b/Makefile
index 13666f0..e728c26 100644
--- a/Makefile
+++ b/Makefile
@@ -38,6 +38,9 @@ bin/syncdata: $(GO_FILES) WORKSPACE
 	$(BZL) build //syncdata/cmd:all
 	cp -f $(BZL_BIN)/syncdata/cmd/cmd_/cmd bin/syncdata
 
+.PHONY: setup
+setup: WORKSPACE */BUILD.bazel GOPATH
+
 .PHONY: syncdata/BUILD.bazel gqlgenc/BUILD.bazel
 */BUILD.bazel: $(GO_FILES)
@@ -45,3 +48,8 @@ bin/syncdata: $(GO_FILES) WORKSPACE
 
 WORKSPACE: go.mod syncdata/BUILD.bazel
 	$(BZL) run //:gazelle -- update-repos -from_file=go.mod
+
+.PHONY: GOPATH
+
+GOPATH:
+	$(BZL) build //:gopath
diff --git "a/contents/blogs/news/2020-11-25-\346\226\260\347\235\200\345\205\245\350\215\267.md" "b/contents/blogs/news/2020-11-25-\346\226\260\347\235\200\345\205\245\350\215\267.md"
new file mode 100644
index 0000000..a988dbf
--- /dev/null
+++ "b/contents/blogs/news/2020-11-25-\346\226\260\347\235\200\345\205\245\350\215\267.md"
@@ -0,0 +1,6 @@
+以下の書籍が入荷しました。
+
+* [Language Implementation Patterns: Create Your Own Domain-Specific and General Programming Languages](https://k9bookshelf.com/products/language-implementation-patterns-create-your-own-domain-specific-and-general-programming-languages)
+* [Node.js Design Patterns - Third edition: Design and implement production-grade Node.js applications using proven patterns and techniques](https://k9bookshelf.com/products/node-js-design-patterns-third-edition-design-and-implement-production-grade-node-js-applications-using-proven-patterns-and-techniques)
+* [The Maker's Guide to the Zombie Apocalypse: Defend Your Base with Simple Circuits, Arduino, and Raspberry Pi](https://k9bookshelf.com/products/the-makers-guide-to-the-zombie-apocalypse-defend-your-base-with-simple-circuits-arduino-and-raspberry-pi)
+* [Programming Webassembly with Rust: Unified Development for Web, Mobile, and Embedded Applications (1ST ed.)](https://k9bookshelf.com/products/programming-webassembly-with-rust)
diff --git "a/contents/blogs/news/\346\226\260\347\235\200\345\205\245\350\215\267.md" "b/contents/blogs/news/\346\226\260\347\235\200\345\205\245\350\215\267.md"
deleted file mode 100644
index dd9e167..0000000
--- "a/contents/blogs/news/\346\226\260\347\235\200\345\205\245\350\215\267.md"
+++ /dev/null
@@ -1,6 +0,0 @@
-以下の書籍が入荷しました
-
-- Language Implementation Patterns: Create Your Own Domain-Specific and General Programming Languages
-- Node.js Design Patterns - Third edition: Design and implement production-grade Node.js applications using proven patterns and techniques
-- The Maker's Guide to the Zombie Apocalypse: Defend Your Base with Simple Circuits, Arduino, and Raspberry Pi
-- Programming Webassembly with Rust: Unified Development for Web, Mobile, and Embedded Applications (1ST ed.)
diff --git a/contents/pages/contact.md b/contents/pages/contact.md
index 2cea7b6..3658864 100644
--- a/contents/pages/contact.md
+++ b/contents/pages/contact.md
@@ -1,2 +1 @@
 k9bookshelfに関するお問い合わせはこちらからお願いいたします。
-
diff --git a/syncdata/BUILD.bazel b/syncdata/BUILD.bazel
index 44a8937..a332748 100644
--- a/syncdata/BUILD.bazel
+++ b/syncdata/BUILD.bazel
@@ -4,6 +4,9 @@ go_library(
     name = "go_default_library",
     srcs = [
         "article.go",
+        "client.go",
+        "deploy.go",
+        "download.go",
         "syncdata.go",
     ],
     importpath = "k9bookshelf/syncdata",
diff --git a/syncdata/article.go b/syncdata/article.go
index b382bc6..39cf87b 100644
--- a/syncdata/article.go
+++ b/syncdata/article.go
@@ -11,19 +11,19 @@ import (
 // Article is documented at https://shopify.dev/docs/admin-api/rest/reference/online-store/article
 type Article struct {
 	ID                int64      `json:"id"`
-	Title             string     `json:"title"`
-	CreatedAt         *time.Time `json:"created_at"`
-	BodyHTML          string     `json:"body_html"`
-	BlogID            int64      `json:"blog_id"`
-	Author            string     `json:"author"`
-	UserID            int64      `json:"user_id"`
-	PublishedAt       *time.Time `json:"published_at"`
-	UpdatedAt         *time.Time `json:"updated_at"`
-	SummaryHTML       *string    `json:"summary_html"`
-	TemplateSuffix    *string    `json:"template_suffix"`
-	Handle            string     `json:"handle"`
-	Tags              string     `json:"tags"`
-	AdminGraphqlAPIID string     `json:"admin_graphql_api_id"`
+	Title             string     `json:"title,omitempty"`
+	CreatedAt         *time.Time `json:"created_at,omitempty"`
+	BodyHTML          string     `json:"body_html,omitempty"`
+	BlogID            int64      `json:"blog_id,omitempty"`
+	Author            string     `json:"author,omitempty"`
+	UserID            int64      `json:"user_id,omitempty"`
+	PublishedAt       *time.Time `json:"published_at,omitempty"`
+	UpdatedAt         *time.Time `json:"updated_at,omitempty"`
+	SummaryHTML       *string    `json:"summary_html,omitempty"`
+	TemplateSuffix    *string    `json:"template_suffix,omitempty"`
+	Handle            string     `json:"handle,omitempty"`
+	Tags              string     `json:"tags,omitempty"`
+	AdminGraphqlAPIID string     `json:"admin_graphql_api_id,omitempty"`
 }
 
 // Articles is not documented yet.
@@ -31,6 +31,11 @@ type Articles struct {
 	Articles []Article `json:"articles"`
 }
 
+// ArticlePayload is not documented yet.
+type ArticlePayload struct {
+	Article Article `json:"article"`
+}
+
 // ArticleResource is not documented yet.
 type ArticleResource struct {
 	client *shopify.Client
@@ -50,3 +55,13 @@ func (a *ArticleResource) List(blogID int64) (*Articles, error) {
 	}
 	return &articles, nil
 }
+
+// Put updates an existing article.
+func (a *ArticleResource) Put(article Article) (*Article, error) {
+	var response Article
+	err := a.client.Put(path.Join("admin", "api", apiVersion, "blogs", fmt.Sprint(article.BlogID), "articles", fmt.Sprintf("%d.json", article.ID)), ArticlePayload{Article: article}, &response)
+	if err != nil {
+		return nil, err
+	}
+	return &response, nil
+}
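A minimal sketch of how the new `Put` method might be called from inside the `syncdata` package, assuming the existing `NewArticleResource` constructor (not part of this diff). With the `omitempty` tags added above, only the fields actually set end up in the PUT payload; the helper name and values below are invented for illustration.

```go
// updateArticleBody is a hypothetical helper; ArticleResource and Article
// come from article.go above.
func updateArticleBody(articles *ArticleResource, blogID, articleID int64, html string) (*Article, error) {
	// With the new omitempty tags, only the fields set here (plus id, which
	// keeps its plain json tag) are serialized into the request body.
	return articles.Put(Article{
		ID:       articleID,
		BlogID:   blogID,
		BodyHTML: html,
	})
}
```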
diff --git a/syncdata/client.go b/syncdata/client.go
new file mode 100644
index 0000000..e058604
--- /dev/null
+++ b/syncdata/client.go
@@ -0,0 +1,57 @@
+package syncdata
+
+import (
+	"context"
+	"fmt"
+	"k9bookshelf/generated"
+	"net/http"
+
+	"github.com/Yamashou/gqlgenc/client"
+	shopify "github.com/bold-commerce/go-shopify"
+)
+
+func establishGqlClient() (*generated.Client, context.Context) {
+	authHeader := func(req *http.Request) {
+		req.Header.Set("X-Shopify-Access-Token", appSecret)
+	}
+
+	return &generated.Client{
+		Client: client.NewClient(http.DefaultClient,
+			fmt.Sprintf("https://%s/admin/api/%s/graphql.json", shopDomain, apiVersion),
+			authHeader),
+	}, context.Background()
+}
+
+func establishRestClient() *shopify.Client {
+	app := shopify.App{
+		ApiKey:    appKey,
+		ApiSecret: appSecret,
+	}
+
+	return shopify.NewClient(app, shopDomain, appSecret, shopify.WithVersion(apiVersion))
+}
+
+func fetchProducts(ctx context.Context, adminClient *generated.Client) (*generated.Products, error) {
+	var cursor *string
+	var res *generated.Products
+
+	for {
+		tmpRes, err := adminClient.Products(ctx, 10, cursor)
+		if err != nil {
+			return nil, err
+		}
+		if res == nil {
+			res = tmpRes
+		} else {
+			res.Products.Edges = append(res.Products.Edges, tmpRes.Products.Edges...)
+		}
+
+		if !tmpRes.Products.PageInfo.HasNextPage {
+			break
+		} else {
+			last := tmpRes.Products.Edges[len(tmpRes.Products.Edges)-1]
+			cursor = &last.Cursor
+		}
+	}
+	return res, nil
+}
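As a usage sketch inside the same package, the GraphQL constructor and `fetchProducts` compose as shown below. `generated.Products` and the edge/node shapes are the gqlgenc-generated types already referenced in this file; the function name is hypothetical.

```go
// listProductHandles is an illustrative example: it walks the paginated
// result that fetchProducts assembles by following PageInfo.HasNextPage
// and the last edge's Cursor, 10 products per request.
func listProductHandles() ([]string, error) {
	gqlClient, ctx := establishGqlClient()

	res, err := fetchProducts(ctx, gqlClient)
	if err != nil {
		return nil, err
	}

	handles := make([]string, 0, len(res.Products.Edges))
	for _, edge := range res.Products.Edges {
		handles = append(handles, edge.Node.Handle)
	}
	return handles, nil
}
```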
diff --git a/syncdata/cmd/main.go b/syncdata/cmd/main.go
index aac31c8..8d78474 100644
--- a/syncdata/cmd/main.go
+++ b/syncdata/cmd/main.go
@@ -2,6 +2,7 @@ package main
 
 import (
 	"fmt"
+	"log"
 	"os"
 
 	"k9bookshelf/syncdata"
@@ -25,8 +26,7 @@ var deployCmd = &cobra.Command{
 	Run: func(cmd *cobra.Command, args []string) {
 		err := syncdata.Deploy(cmd.Flag("input").Value.String())
 		if err != nil {
-			fmt.Fprintln(os.Stderr, err)
-			os.Exit(1)
+			log.Fatal(err)
 		}
 	},
 }
@@ -37,8 +37,7 @@ var downloadCmd = &cobra.Command{
 	Run: func(cmd *cobra.Command, args []string) {
 		err := syncdata.Download(cmd.Flag("output").Value.String())
 		if err != nil {
-			fmt.Fprintln(os.Stderr, err)
-			os.Exit(1)
+			log.Fatal(err)
 		}
 	},
 }
@@ -55,7 +54,6 @@ func main() {
 	rootCmd.AddCommand(deployCmd)
 
 	if err := rootCmd.Execute(); err != nil {
-		fmt.Fprintln(os.Stderr, err)
-		os.Exit(1)
+		log.Fatal(err)
 	}
 }
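The three error branches above now go through `log.Fatal`, which is roughly the removed pair of lines folded into one call, with the message routed through the standard logger (stderr, timestamp prefix by default) and no chance for deferred functions to run. A minimal sketch of that equivalence, as it could sit in main.go which already imports `log` and `os`:

```go
// fatal mirrors what log.Fatal(err) does: print via the standard logger,
// then exit with status 1, skipping any deferred calls.
func fatal(err error) {
	log.Print(err)
	os.Exit(1)
}
```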
diff --git a/syncdata/deploy.go b/syncdata/deploy.go
new file mode 100644
index 0000000..bd34d80
--- /dev/null
+++ b/syncdata/deploy.go
@@ -0,0 +1,312 @@
+package syncdata
+
+import (
+	"fmt"
+	"io/ioutil"
+	"k9bookshelf/generated"
+	"os"
+	"path"
+	"path/filepath"
+	"sync"
+
+	shopify "github.com/bold-commerce/go-shopify"
+	"github.com/gomarkdown/markdown"
+	"github.com/vbauerster/mpb"
+	"github.com/vbauerster/mpb/decor"
+)
+
+func deployProducts(contents []Content, bar *mpb.Bar) error {
+	gqlClient, ctx := establishGqlClient()
+	wg := sync.WaitGroup{}
+	c := make(chan error)
+	for _, content := range contents {
+		wg.Add(1)
+
+		go func(handle, html string) {
+			defer wg.Done()
+			defer bar.Increment()
+
+			productByHandle, err := gqlClient.ProductByHandle(ctx, handle)
+			if err != nil {
+				c <- err
+				return
+			}
+
+			res, err := gqlClient.Deploy(
+				ctx,
+				generated.ProductInput{
+					ID:              &productByHandle.ProductByHandle.ID,
+					Handle:          &handle,
+					DescriptionHTML: &html,
+				},
+			)
+			if err != nil {
+				c <- err
+				return
+			}
+			if len(res.ProductUpdate.UserErrors) > 0 {
+				var errorBuf string
+				for _, userError := range res.ProductUpdate.UserErrors {
+					errorBuf += fmt.Sprintf("'%s': '%s'\n", userError.Field, userError.Message)
+				}
+				c <- fmt.Errorf("{\n%s}", errorBuf)
+				return
+			}
+		}(content.handle, content.html)
+	}
+	go func() {
+		wg.Wait()
+		c <- nil
+	}()
+
+	err := <-c
+	return err
+}
+
+func deployPages(contents []Content, bar *mpb.Bar) error {
+	var err error
+	adminClient := establishRestClient()
+	wg := sync.WaitGroup{}
+	c := make(chan error)
+	for _, content := range contents {
+		wg.Add(1)
+
+		go func(handle, html string) {
+			defer wg.Done()
+			defer bar.Increment()
+
+			pages, err := adminClient.Page.List(nil)
+			if err != nil {
+				c <- err
+				return
+			}
+			var page shopify.Page
+			for _, p := range pages {
+				if p.Handle == handle {
+					page = p
+					break
+				}
+			}
+
+			// NOTE: Because Page struct isn't tagged by `omitempty` and Metafields are initialized with nil, replace a nil Metafields with an empty slice before updating.
+			metafields := []shopify.Metafield{}
+			if page.Metafields != nil {
+				metafields = page.Metafields
+			}
+			_, err = adminClient.Page.Update(shopify.Page{
+				ID:             page.ID,
+				Author:         page.Author,
+				Handle:         handle,
+				Title:          page.Title,
+				CreatedAt:      page.CreatedAt,
+				UpdatedAt:      page.UpdatedAt,
+				BodyHTML:       html,
+				TemplateSuffix: page.TemplateSuffix,
+				PublishedAt:    page.PublishedAt,
+				ShopID:         page.ShopID,
+				Metafields:     metafields,
+			})
+
+			if err != nil {
+				c <- err
+				return
+			}
+		}(content.handle, content.html)
+	}
+	go func() {
+		wg.Wait()
+		c <- nil
+	}()
+
+	err = <-c
+	return err
+}
+
+func deployBlogs(blogs map[string][]Content, bar *mpb.Bar) error {
+	var err error
+	adminClient := establishRestClient()
+	wg := sync.WaitGroup{}
+	c := make(chan error)
+
+	currentBlogs, err := adminClient.Blog.List(nil)
+	if err != nil {
+		return err
+	}
+
+	for _blogCategory, _blogContents := range blogs {
+		wg.Add(1)
+		var b *shopify.Blog
+		go func(blogCategory string, blogContents []Content) {
+			defer wg.Done()
+
+			for _, currentBlog := range currentBlogs {
+				if currentBlog.Handle == blogCategory {
+					b = &currentBlog
+					break
+				}
+			}
+			if b == nil {
+				c <- fmt.Errorf("blog category [%s] does not exist", blogCategory)
+				return
+			}
+
+			articles, err := NewArticleResource(adminClient).List(b.ID)
+			if err != nil {
+				c <- err
+				return
+			}
+
+			for _, _content := range blogContents {
+				wg.Add(1)
+				go func(content Content) {
+					defer wg.Done()
+					defer bar.Increment()
+
+					var article *Article
+					for _, a := range articles.Articles {
+						if content.handle == a.Handle {
+							article = &a
+						}
+					}
+					if article == nil {
+						c <- fmt.Errorf("blog article [%s] does not exist", content.handle)
+						return
+					}
+					_, err = NewArticleResource(adminClient).Put(Article{
+						ID:       article.ID,
+						BlogID:   article.BlogID,
+						Handle:   content.handle,
+						BodyHTML: content.html,
+					})
+					if err != nil {
+						c <- err
+						return
+					}
+				}(_content)
+			}
+		}(_blogCategory, _blogContents)
+
+	}
+	go func() {
+		wg.Wait()
+		c <- nil
+	}()
+
+	err = <-c
+	return err
+}
+
+func filesToContents(inputDir string, files []os.FileInfo) ([]Content, error) {
+	contents := []Content{}
+	for _, file := range files {
+		filename := file.Name()
+		handle := filename[0 : len(filename)-len(filepath.Ext(filename))]
+		md, err := ioutil.ReadFile(path.Join(inputDir, filename))
+		if err != nil {
+			return nil, err
+		}
+		html := string(markdown.ToHTML(md, nil, nil))
+		contents = append(contents, Content{
+			handle: handle,
+			html:   html,
+		})
+	}
+	return contents, nil
+}
+
+type tmpIterable struct {
+	f                func(bar *mpb.Bar) error
+	numberOfContents int
+}
+
+// Deploy uploads contents to store
+func Deploy(input string) error {
+	rawProducts, err := ioutil.ReadDir(path.Join(input, "products"))
+	if err != nil {
+		return err
+	}
+	products, err := filesToContents(path.Join(input, "products"), rawProducts)
+	if err != nil {
+		return err
+	}
+
+	rawPages, err := ioutil.ReadDir(path.Join(input, "pages"))
+	if err != nil {
+		return err
+	}
+	pages, err := filesToContents(path.Join(input, "pages"), rawPages)
+
+	rawBlogs, err := ioutil.ReadDir(path.Join(input, "blogs"))
+	if err != nil {
+		return err
+	}
+
+	blogs := map[string][]Content{}
+	numberOfBlogs := 0
+	for _, b := range rawBlogs {
+		if b.IsDir() {
+			files, err := ioutil.ReadDir(path.Join(input, "blogs", b.Name()))
+			if err != nil {
+				return err
+			}
+			contents, err := filesToContents(path.Join(input, "blogs", b.Name()), files)
+			if err != nil {
+				return err
+			}
+			numberOfBlogs += len(contents)
+			blogs[b.Name()] = contents
+		}
+	}
+
+	wg := sync.WaitGroup{}
+	p := mpb.New(mpb.WithWaitGroup(&wg))
+
+	c := make(chan error)
+	for name, _f := range map[string]tmpIterable{
+		"products": {
+			f: func(bar *mpb.Bar) error {
+				return deployProducts(products, bar)
+			},
+			numberOfContents: len(products),
+		},
+		"pages": {
+			f: func(bar *mpb.Bar) error {
+				return deployPages(pages, bar)
+			},
+			numberOfContents: len(pages),
+		},
+		"blogs": {
+			f: func(bar *mpb.Bar) error {
+				return deployBlogs(blogs, bar)
+			},
+			numberOfContents: numberOfBlogs,
+		},
+	} {
+		wg.Add(1)
+		bar := p.AddBar(int64(_f.numberOfContents),
+			mpb.PrependDecorators(
+				decor.Name(path.Join(input, name)),
+				decor.Percentage(decor.WCSyncSpace),
+			),
+			mpb.AppendDecorators(
+				decor.OnComplete(
+					decor.EwmaETA(decor.ET_STYLE_GO, 60), "done",
+				),
+			),
+		)
+		go func(f func(bar *mpb.Bar) error) {
+			defer wg.Done()
+			if err = f(bar); err != nil {
+				c <- err
+				return
+			}
+		}(_f.f)
+	}
+	go func() {
+		p.Wait()
+		c <- nil
+	}()
+
+	err = <-c
+	return err
+}
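`deployProducts`, `deployPages` and `deployBlogs` all share the same concurrency shape: one goroutine per item, a `sync.WaitGroup` to detect completion, and an unbuffered channel that delivers either the first error or nil. A stripped-down sketch of that pattern (the function and its parameters are invented for illustration):

```go
package syncdata

import "sync"

// fanOut mirrors the pattern used above. Like those functions, workers that
// fail after the first error has been consumed will block on the unbuffered
// channel; golang.org/x/sync/errgroup is the usual alternative when that
// matters.
func fanOut(items []string, work func(item string) error) error {
	wg := sync.WaitGroup{}
	c := make(chan error)

	for _, item := range items {
		wg.Add(1)
		go func(item string) {
			defer wg.Done()
			if err := work(item); err != nil {
				c <- err
			}
		}(item)
	}

	go func() {
		wg.Wait()
		c <- nil
	}()

	return <-c // first error wins; nil once every worker has finished
}
```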
diff --git a/syncdata/download.go b/syncdata/download.go
new file mode 100644
index 0000000..f7dee31
--- /dev/null
+++ b/syncdata/download.go
@@ -0,0 +1,129 @@
+package syncdata
+
+import (
+	"os"
+	"path"
+	"strings"
+	"sync"
+
+	"github.com/mattn/godown"
+	"github.com/vbauerster/mpb"
+	"github.com/vbauerster/mpb/decor"
+)
+
+func dowloadContens(output string, contents *[]Content, bar *mpb.Bar) error {
+	err := os.MkdirAll(output, os.ModePerm)
+	if err != nil {
+		return err
+	}
+	var wg sync.WaitGroup
+
+	for _, content := range *contents {
+		wg.Add(1)
+		c := make(chan error)
+
+		go func(handle, descriptionHTML string) {
+			defer wg.Done()
+			defer bar.Increment()
+
+			file, err := os.Create(path.Join(output, handle+".md"))
+			if err != nil {
+				c <- err
+				return
+			}
+			err = godown.Convert(file, strings.NewReader(descriptionHTML), nil)
+			if err != nil {
+				c <- err
+				return
+			}
+			c <- nil
+		}(content.handle, content.html)
+		err = <-c
+		if err != nil {
+			return err
+		}
+	}
+
+	wg.Wait()
+	return nil
+}
+
+// Download downloads contents from store
+func Download(output string) error {
+	adminClient, ctx := establishGqlClient()
+	restClient := establishRestClient()
+
+	res, err := fetchProducts(ctx, adminClient)
+	if err != nil {
+		return err
+	}
+	products := Contents{kind: "products"}
+	for _, product := range res.Products.Edges {
+		products.items = append(products.items, Content{
+			handle: product.Node.Handle,
+			html:   product.Node.DescriptionHTML,
+		})
+	}
+
+	rawPages, err := restClient.Page.List(nil)
+	if err != nil {
+		return err
+	}
+	pages := Contents{kind: "pages"}
+	for _, page := range rawPages {
+		pages.items = append(pages.items, Content{
+			handle: page.Handle,
+			html:   page.BodyHTML,
+		})
+	}
+
+	rawBlogs, err := restClient.Blog.List(nil)
+	if err != nil {
+		return err
+	}
+	blogs := []Contents{}
+	for _, blog := range rawBlogs {
+		contents := Contents{
+			kind: path.Join("blogs", blog.Handle),
+		}
+		articles, err := NewArticleResource(restClient).List(blog.ID)
+		if err != nil {
+			return err
+		}
+		for _, article := range articles.Articles {
+			contents.items = append(contents.items, Content{
+				handle: article.Handle,
+				html:   article.BodyHTML,
+			})
+		}
+		blogs = append(blogs, contents)
+	}
+
+	var wg sync.WaitGroup
+	progress := mpb.New(mpb.WithWaitGroup(&wg))
+	for _, cts := range append([]Contents{products, pages}, blogs...) {
+		wg.Add(1)
+		bar := progress.AddBar(int64(len(cts.items)),
+			mpb.PrependDecorators(
+				decor.Name(cts.kind),
+				decor.Percentage(decor.WCSyncSpace),
+			),
+			mpb.AppendDecorators(
+				decor.OnComplete(
+					decor.EwmaETA(decor.ET_STYLE_GO, 60), "done",
+				),
+			),
+		)
+
+		go func(o string, items []Content, b *mpb.Bar) {
+			defer wg.Done()
+			err = dowloadContens(o, &items, b)
+		}(path.Join(output, cts.kind), cts.items, bar)
+		if err != nil {
+			return err
+		}
+	}
+
+	progress.Wait()
+	return nil
+}
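Putting the two entry points together, a hypothetical round trip looks like the sketch below. It assumes MARKDOWN_APP_KEY, MARKDOWN_APP_SECRET and the shop-domain variable read in syncdata.go are set in the environment. `Download` writes `<dir>/products/*.md`, `<dir>/pages/*.md` and `<dir>/blogs/<blog-handle>/*.md`, and `Deploy` expects that same layout back.

```go
// roundTrip is illustrative only: pull everything down, edit the Markdown
// on disk, then push it back with the same directory layout.
func roundTrip(dir string) error {
	if err := Download(dir); err != nil {
		return err
	}
	// ... edit the generated Markdown files under dir ...
	return Deploy(dir)
}
```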
diff --git a/syncdata/syncdata.go b/syncdata/syncdata.go
index 55d2ce1..2826040 100644
--- a/syncdata/syncdata.go
+++ b/syncdata/syncdata.go
@@ -1,23 +1,7 @@
 package syncdata
 
 import (
-	"context"
-	"fmt"
-	"io/ioutil"
-	"k9bookshelf/generated"
-	"net/http"
 	"os"
-	"path"
-	"path/filepath"
-	"strings"
-	"sync"
-
-	"github.com/Yamashou/gqlgenc/client"
-	shopify "github.com/bold-commerce/go-shopify"
-	"github.com/gomarkdown/markdown"
-	"github.com/mattn/godown"
-	"github.com/vbauerster/mpb"
-	"github.com/vbauerster/mpb/decor"
 )
 
 const apiVersion string = "2020-10"
@@ -27,52 +11,6 @@ var appKey string = os.Getenv("MARKDOWN_APP_KEY")
 var appSecret string = os.Getenv("MARKDOWN_APP_SECRET")
 var shopToken string = appSecret
 
-func gqlClient() (*generated.Client, context.Context) {
-	authHeader := func(req *http.Request) {
-		req.Header.Set("X-Shopify-Access-Token", appSecret)
-	}
-
-	return &generated.Client{
-		Client: client.NewClient(http.DefaultClient,
-			fmt.Sprintf("https://%s/admin/api/%s/graphql.json", shopDomain, apiVersion),
-			authHeader),
-	}, context.Background()
-}
-
-func establishRestClient() *shopify.Client {
-	app := shopify.App{
-		ApiKey:    appKey,
-		ApiSecret: appSecret,
-	}
-
-	return shopify.NewClient(app, shopDomain, appSecret, shopify.WithVersion(apiVersion))
-}
-
-func fetchProducts(ctx context.Context, adminClient *generated.Client) (*generated.Products, error) {
-	var cursor *string
-	var res *generated.Products
-
-	for {
-		tmpRes, err := adminClient.Products(ctx, 10, cursor)
-		if err != nil {
-			return nil, err
-		}
-		if res == nil {
-			res = tmpRes
-		} else {
-			res.Products.Edges = append(res.Products.Edges, tmpRes.Products.Edges...)
-		}
-
-		if !tmpRes.Products.PageInfo.HasNextPage {
-			break
-		} else {
-			last := tmpRes.Products.Edges[len(tmpRes.Products.Edges)-1]
-			cursor = &last.Cursor
-		}
-	}
-	return res, nil
-}
-
 // Content is not documented yet.
 type Content struct {
 	handle string
@@ -84,198 +22,3 @@ type Contents struct {
 	kind  string
 	items []Content
 }
-
-func dowloadContens(output string, contents *[]Content, bar *mpb.Bar) error {
-	err := os.MkdirAll(output, os.ModePerm)
-	if err != nil {
-		return err
-	}
-	var wg sync.WaitGroup
-
-	for _, content := range *contents {
-		wg.Add(1)
-		c := make(chan error)
-
-		go func(handle, descriptionHTML string) {
-			defer wg.Done()
-			defer bar.Increment()
-
-			file, err := os.Create(path.Join(output, handle+".md"))
-			if err != nil {
-				c <- err
-				return
-			}
-			err = godown.Convert(file, strings.NewReader(descriptionHTML), nil)
-			if err != nil {
-				c <- err
-				return
-			}
-			c <- nil
-		}(content.handle, content.html)
-		err = <-c
-		if err != nil {
-			return err
-		}
-	}
-
-	wg.Wait()
-	return nil
-}
-
-// Download downloads contents from store
-func Download(output string) error {
-	adminClient, ctx := gqlClient()
-	restClient := establishRestClient()
-
-	res, err := fetchProducts(ctx, adminClient)
-	if err != nil {
-		return err
-	}
-	products := Contents{kind: "products"}
-	for _, product := range res.Products.Edges {
-		products.items = append(products.items, Content{
-			handle: product.Node.Handle,
-			html:   product.Node.DescriptionHTML,
-		})
-	}
-
-	rawPages, err := restClient.Page.List(nil)
-	if err != nil {
-		return err
-	}
-	pages := Contents{kind: "pages"}
-	for _, page := range rawPages {
-		pages.items = append(pages.items, Content{
-			handle: page.Handle,
-			html:   page.BodyHTML,
-		})
-	}
-
-	rawBlogs, err := restClient.Blog.List(nil)
-	if err != nil {
-		return err
-	}
-	blogs := []Contents{}
-	for _, blog := range rawBlogs {
-		contents := Contents{
-			kind: path.Join("blogs", blog.Handle),
-		}
-		articles, err := NewArticleResource(restClient).List(blog.ID)
-		if err != nil {
-			return err
-		}
-		for _, article := range articles.Articles {
-			contents.items = append(contents.items, Content{
-				handle: article.Handle,
-				html:   article.BodyHTML,
-			})
-		}
-		blogs = append(blogs, contents)
-	}
-
-	var wg sync.WaitGroup
-	progress := mpb.New(mpb.WithWaitGroup(&wg))
-	for _, cts := range append([]Contents{products, pages}, blogs...) {
-		wg.Add(1)
-		bar := progress.AddBar(int64(len(cts.items)),
-			mpb.PrependDecorators(
-				decor.Name(cts.kind),
-				decor.Percentage(decor.WCSyncSpace),
-			),
-			mpb.AppendDecorators(
-				decor.OnComplete(
-					decor.EwmaETA(decor.ET_STYLE_GO, 60), "done",
-				),
-			),
-		)
-
-		go func(o string, items []Content, b *mpb.Bar) {
-			defer wg.Done()
-			err = dowloadContens(o, &items, b)
-		}(path.Join(output, cts.kind), cts.items, bar)
-		if err != nil {
-			return err
-		}
-	}
-
-	progress.Wait()
-	return nil
-}
-
-// Deploy uploads contents to store
-func Deploy(input string) error {
-	files, err := ioutil.ReadDir(path.Join(input, "products"))
-	if err != nil {
-		return err
-	}
-	adminClient, ctx := gqlClient()
-	wg := sync.WaitGroup{}
-	p := mpb.New(mpb.WithWaitGroup(&wg))
-	bar := p.AddBar(int64(len(files)),
-		mpb.PrependDecorators(
-			decor.Name(path.Join(input, "products")),
-			decor.Percentage(decor.WCSyncSpace),
-		),
-		mpb.AppendDecorators(
-			decor.OnComplete(
-				decor.EwmaETA(decor.ET_STYLE_GO, 60), "done",
-			),
-		),
-	)
-
-	for _, file := range files {
-		wg.Add(1)
-		c := make(chan error)
-		filename := file.Name()
-
-		go func(handle, pathToFile string) {
-			defer wg.Done()
-			defer bar.Increment()
-
-			productByHandle, err := adminClient.ProductByHandle(ctx, handle)
-			if err != nil {
-				c <- err
-				return
-			}
-
-			md, err := ioutil.ReadFile(pathToFile)
-			if err != nil {
-				c <- err
-				return
-			}
-			descriptionHTML := string(markdown.ToHTML(md, nil, nil))
-
-			res, err := adminClient.Deploy(
-				ctx,
-				generated.ProductInput{
-					ID:              &productByHandle.ProductByHandle.ID,
-					Handle:          &handle,
-					DescriptionHTML: &descriptionHTML,
-				},
-			)
-			if err != nil {
-				c <- err
-				return
-			}
-			if len(res.ProductUpdate.UserErrors) > 0 {
-				var errorBuf string
-				for _, userError := range res.ProductUpdate.UserErrors {
-					errorBuf += fmt.Sprintf("'%s': '%s'\n", userError.Field, userError.Message)
-				}
-				c <- fmt.Errorf("{\n%s}", errorBuf)
-				return
-			}
-			c <- nil
-		}(
-			filename[0:len(filename)-len(filepath.Ext(filename))],
-			path.Join(input, "products", filename),
-		)
-
-		err = <-c
-		if err != nil {
-			return err
-		}
-	}
-	p.Wait()
-	return nil
-}
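After this refactor, syncdata.go is reduced to configuration and the two shared value types; the client setup, download and deploy logic now live in client.go, download.go and deploy.go. For reference, a small illustration of how those carrier types are populated (the helper and its values are made up; the fields are the unexported handle/html and kind/items pairs kept above):

```go
// exampleContents is illustrative only: a handle plus its HTML body,
// grouped under a kind such as "pages" or "blogs/news".
func exampleContents() Contents {
	return Contents{
		kind: "pages",
		items: []Content{
			{handle: "contact", html: "<p>k9bookshelfに関するお問い合わせはこちらからお願いいたします。</p>"},
		},
	}
}
```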