Browse Source

Add compiler backlog bonanza generation and enable pagination for issue search

Jack Huey 3 years ago
parent
commit
50e71c871b
4 changed files with 96 additions and 20 deletions
  1. 27 0
      src/agenda.rs
  2. 21 0
      src/bin/compiler.rs
  3. 36 20
      src/github.rs
  4. 12 0
      templates/compiler_backlog_bonanza.tt

+ 27 - 0
src/agenda.rs

@@ -599,3 +599,30 @@ pub fn lang_planning<'a>() -> Box<dyn Action> {
         ],
     })
 }
+
+// Things to add (maybe):
+// - Compiler RFCs
+// - P-high issues
+/// Builds the "compiler backlog bonanza" agenda: a single list query over
+/// rust-lang/rust for every open `C-tracking-issue`, excluding issues
+/// labelled for the libs/libs-api/lang/rustdoc teams — approximating the
+/// T-compiler tracking-issue backlog.
+pub fn compiler_backlog_bonanza<'a>() -> Box<dyn Action> {
+    Box::new(Step {
+        name: "compiler_backlog_bonanza",
+        actions: vec![
+            Query {
+                repos: vec![
+                    ("rust-lang", "rust"),
+                ],
+                queries: vec![
+                    QueryMap {
+                        // Exposed to the template (compiler_backlog_bonanza.tt)
+                        // under the name `tracking_issues`.
+                        name: "tracking_issues",
+                        kind: QueryKind::List,
+                        query: Box::new(github::Query {
+                            filters: vec![("state", "open")],
+                            include_labels: vec!["C-tracking-issue"],
+                            // NOTE(review): T-compiler itself is not required via
+                            // include_labels; untagged tracking issues are kept.
+                            exclude_labels: vec!["T-libs-api", "T-libs", "T-lang", "T-rustdoc"],
+                        }),
+                    },
+                ],
+            },
+        ],
+    })
+}

+ 21 - 0
src/bin/compiler.rs

@@ -0,0 +1,21 @@
+use triagebot::agenda;
+
+/// CLI entry point for generating compiler-team agendas.
+///
+/// Usage: `compiler backlog_bonanza` — runs the backlog-bonanza agenda and
+/// writes its rendered output to stdout. Any other invocation (including no
+/// arguments) falls through to the usage message on stderr.
+#[tokio::main(flavor = "current_thread")]
+async fn main() {
+    // Load environment from a `.env` file if present (e.g. API credentials);
+    // a missing file is not an error.
+    dotenv::dotenv().ok();
+    tracing_subscriber::fmt::init();
+
+    let args: Vec<String> = std::env::args().collect();
+    if args.len() == 2 {
+        match &args[1][..] {
+            "backlog_bonanza" => {
+                let agenda = agenda::compiler_backlog_bonanza();
+                // `call()` executes the agenda's queries and returns the
+                // rendered text, printed without a trailing newline.
+                print!("{}", agenda.call().await);
+                return;
+            }
+            _ => {}
+        }
+    }
+
+    eprintln!("Usage: compiler (backlog_bonanza)")
+}

+ 36 - 20
src/github.rs

@@ -852,10 +852,12 @@ pub struct Repository {
     pub full_name: String,
 }
 
+// `Copy`/`Clone` added so the pagination loop can advance `page` on its
+// own copy each iteration (see the `ordering.page += 1` below) — presumably
+// because the URL builders take `Ordering` by value; confirm at call sites.
+#[derive(Copy, Clone)]
 struct Ordering<'a> {
+    // Query-string parameters for GitHub list/search endpoints:
+    // sort field, "asc"/"desc" direction, page size (kept as a string
+    // since it is spliced directly into the URL), and 1-based page number.
     pub sort: &'a str,
     pub direction: &'a str,
     pub per_page: &'a str,
+    pub page: usize,
 }
 
 impl Repository {
@@ -885,6 +887,7 @@ impl Repository {
             sort: "created",
             direction: "asc",
             per_page: "100",
+            page: 1,
         };
         let filters: Vec<_> = filters
             .clone()
@@ -916,27 +919,39 @@ impl Repository {
             || filters.iter().any(|&(key, _)| key == "no")
             || is_pr && !include_labels.is_empty();
 
-        let url = if use_search_api {
-            self.build_search_issues_url(&filters, include_labels, exclude_labels, ordering)
-        } else if is_pr {
-            self.build_pulls_url(&filters, include_labels, ordering)
-        } else {
-            self.build_issues_url(&filters, include_labels, ordering)
-        };
+        // If there are more than `per_page` of issues, we need to paginate
+        let mut issues = vec![];
+        loop {
+            let url = if use_search_api {
+                self.build_search_issues_url(&filters, include_labels, exclude_labels, ordering)
+            } else if is_pr {
+                self.build_pulls_url(&filters, include_labels, ordering)
+            } else {
+                self.build_issues_url(&filters, include_labels, ordering)
+            };
+    
+            let result = client.get(&url);
+            if use_search_api {
+                let result = client
+                    .json::<IssueSearchResult>(result)
+                    .await
+                    .with_context(|| format!("failed to list issues from {}", url))?;
+                issues.extend(result.items);
+                if issues.len() < result.total_count {
+                    ordering.page += 1;
+                    continue
+                }
+            } else {
+                // FIXME: paginate with non-search
+                issues = client
+                    .json(result)
+                    .await
+                    .with_context(|| format!("failed to list issues from {}", url))?
+            }
 
-        let result = client.get(&url);
-        if use_search_api {
-            let result = client
-                .json::<IssueSearchResult>(result)
-                .await
-                .with_context(|| format!("failed to list issues from {}", url))?;
-            Ok(result.items)
-        } else {
-            client
-                .json(result)
-                .await
-                .with_context(|| format!("failed to list issues from {}", url))
+            break;
         }
+        Ok(issues)
     }
 
     fn build_issues_url(
@@ -1013,12 +1028,13 @@ impl Repository {
             .collect::<Vec<_>>()
             .join("+");
         format!(
-            "{}/search/issues?q={}&sort={}&order={}&per_page={}",
+            "{}/search/issues?q={}&sort={}&order={}&per_page={}&page={}",
             Repository::GITHUB_API_URL,
             filters,
             ordering.sort,
             ordering.direction,
             ordering.per_page,
+            ordering.page,
         )
     }
 }

+ 12 - 0
templates/compiler_backlog_bonanza.tt

@@ -0,0 +1,12 @@
+{% import "_issues_heading.tt" as issues_heading %}
+{% import "_issues.tt" as issues %}
+---
+title: T-compiler backlog bonanza
+tags: backlog-bonanza
+---
+
+# T-compiler backlog bonanza
+
+## Tracking issues
+
+{{-issues_heading::render(issues=tracking_issues)}}