GO: Confluence API not getting all the attachments


Question

I am using golang for my application, and in it I am trying to fetch attachments from Confluence. The details are below:

req := "https://domain.atlassian.net/wiki/rest/api/content?expand=body.view,version&type=page&start=0&limit="
res, err := w.sendRequest(req)
if err != nil {
    return nil, err
}
if strings.EqualFold(contentID, "") == false {
    if len(res.Results) != 0 {
        for i := range res.Results {
            Log.Info("files processed is:", i)
            extension := filepath.Ext(res.Results[i].Title)
            isExtenstionExclude := isExcludedExtenstion(sbConfig, extension)
            ispathExclude := isExcludedFolder(sbConfig, res.Results[i].Links.Webui)
            if sbgoclient.ExtesionMap[extension] == 0 || isExtenstionExclude || ispathExclude {
                binarycount++
                Log.Info("Excluded by extension " + extension + " for file " + res.Results[i].Title)
            } else {
                md5HashInBytes := md5.Sum([]byte(res.Results[i].Title))
                md5HashInString := hex.EncodeToString(md5HashInBytes[:])
                file_path := parameter[0] + "/" + md5HashInString + strings.Replace(res.Results[i].Title, " ", "", -1)
                file, err := os.Create(file_path)

                if err != nil {
                    fmt.Println(err)
                    panic(err)
                }
                url_1 := sbConfig.ConfluenceUrl + res.Results[i].Links.Download
                req, err := http.NewRequest("GET", url_1, nil)
                resp, _ := w.client.Do(req) // add a filter to check redirect

                if err != nil {
                    fmt.Println(err)
                    panic(err)
                }
                // Close body on function exit
                defer resp.Body.Close()
                fmt.Println(resp.Status)

                size, err = io.Copy(file, resp.Body)

                if err != nil {
                    panic(err)
                }

                defer file.Close()
                fmt.Printf("%s with %v bytes downloaded", res.Results[i].Title, size)
                meta := map[string]string{
                    "size": strconv.FormatInt(size, 10),
                }
            }
        }
    }
} else {
    if len(res.Results) != 0 {
        for i := range res.Results {
            Log.Info("page indexing is", res.Results[i].Title, "and i value is:", i)
            fmt.Println("hmtl content is", res.Results[i].Body.View.Value)
            fmt.Println("page name is:", res.Results[i].Title)
            md5HashInBytes := md5.Sum([]byte(res.Results[i].Title))
            md5HashInString := hex.EncodeToString(md5HashInBytes[:])
            file_path := parameter[0] + "/" + md5HashInString + strings.Replace(res.Results[i].Title, " ", "", -1) + ".html"
            file, err := os.Create(file_path)

            if err != nil {
                fmt.Println(err)
                panic(err)
            }
            html_content := "<html><body>" + res.Results[i].Body.View.Value + "</body></html>"
            _, err = file.WriteString(html_content)
            if err != nil {
                fmt.Println("error writing into file", err)
                panic(err)
            }
            file.Close()
        }
    }
}
func (w *Wiki) sendRequest(req *http.Request) (*vijay_content, error) {
    var testjson vijay_content
    req.Header.Add("Accept", "application/json, */*")
    w.authMethod.auth(req)

    resp, err := w.client.Do(req)
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()
    bodyBytes, err := ioutil.ReadAll(resp.Body)
    if err != nil {
        return nil, err
    }
    body := string(bodyBytes)

    fmt.Printf("response is %s\n", body)
    err = json.Unmarshal(bodyBytes, &testjson)
    if err != nil {
        fmt.Println("error here is", err)
        return nil, err
    }

    switch resp.StatusCode {
    case http.StatusOK, http.StatusCreated, http.StatusPartialContent:
        return &testjson, nil
    case http.StatusNoContent, http.StatusResetContent:
        return nil, nil
    case http.StatusUnauthorized:
        return nil, fmt.Errorf("Authentication failed.")
    case http.StatusServiceUnavailable:
        return nil, fmt.Errorf("Service is not available (%s).", resp.Status)
    case http.StatusInternalServerError:
        return nil, fmt.Errorf("Internal server error: %s", resp.Status)
    }

    return nil, fmt.Errorf("Unknown response status %s", resp.Status)
}

In this Confluence domain I actually have more than 1000 documents, but I am only able to download around 80 to 90. I don't know what is happening here; please suggest any changes to be made.

The following is the struct used to extract values from the response JSON:

type Links struct {
    Download string `json:"download,omitempty"`
    Self     string `json:"self,omitempty"`
    Webui    string `json:"webui,omitempty"`
}
type View_struct struct {
    Value string `json:",innerxml"`
}
type Body_struct struct {
    View View_struct `json:"view,omitempty"`
}
type Vijay_Results struct {
    ID    string      `json:"id,omitempty"`
    Links Links       `json:"_links,omitempty"`
    Title string      `json:"title,omitempty"`
    Body  Body_struct `json:"body,omitempty"`
}
type vijay_content struct {
    Results []Vijay_Results `json:"results,omitempty"`
    Start   int             `json:"start,omitempty"`
    Limit   int             `json:"limit,omitempty"`
    Size    int             `json:"size,omitempty"`
}



Answer 1

Score: 1

The API paginates the results. You should fetch the whole list in multiple requests by specifying start and limit.

For example, request the first 30 documents with start=0&limit=30, then the next 30 with start=30&limit=30, and so on, until you get a response with an empty list.

You can read more details in the documentation on pagination.
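
As a rough illustration, here is a minimal sketch of that loop, reusing the sendRequest helper and the vijay_content struct from the question. The page size of 30, the variable names, and the stopping condition are assumptions for the example, not code from the original post:

limit := 30
for start := 0; ; start += limit {
    // Build the request for the current page; start and limit drive the pagination.
    pageURL := fmt.Sprintf("https://domain.atlassian.net/wiki/rest/api/content?expand=body.view,version&type=page&start=%d&limit=%d", start, limit)
    req, err := http.NewRequest("GET", pageURL, nil)
    if err != nil {
        return nil, err
    }
    res, err := w.sendRequest(req)
    if err != nil {
        return nil, err
    }
    if res == nil || len(res.Results) == 0 {
        break // empty page: every document has been fetched
    }
    // ... process res.Results here exactly as in the loops shown in the question ...
}

Depending on the Confluence version, the paginated response may also carry a _links.next URL that can be followed instead of computing start by hand.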

