[go: up one dir, main page]

aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorn1 <hrdina.pavel@gmail.com>2024-02-05 12:00:19 +0100
committern1 <hrdina.pavel@gmail.com>2024-02-05 12:00:19 +0100
commit3b9134eed5ebaf260b4d307c2283be3f456511fc (patch)
tree0e73d51bafd54977fe79f42f4ce4ab9d01f174c4
parent7cc6ab7ce0a1565b0b724f3e5435abde8e9fbcba (diff)
Updated: refactoring + tracing added.0.1.3
- tracing added (--trace) - possibility to fetch failed jobs traces (logs) - inner refactoring - much better error handling
-rw-r--r--Cargo.lock15
-rw-r--r--Cargo.toml9
-rw-r--r--README.md23
-rw-r--r--src/app.rs135
-rw-r--r--src/args.rs23
-rw-r--r--src/config.rs39
-rw-r--r--src/job.rs44
-rw-r--r--src/main.rs204
-rw-r--r--src/pipeline.rs258
-rw-r--r--src/ui.rs47
10 files changed, 565 insertions, 232 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 9ab738e..3e12495 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -112,6 +112,7 @@ dependencies = [
"bitflags",
"clap_lex",
"is-terminal",
+ "once_cell",
"strsim",
"termcolor",
]
@@ -419,7 +420,7 @@ dependencies = [
[[package]]
name = "glp"
-version = "0.1.2"
+version = "0.1.3"
dependencies = [
"chrono",
"clap",
@@ -429,7 +430,10 @@ dependencies = [
"json",
"ptree",
"reqwest",
+ "serde 1.0.147",
+ "sigpipe",
"spinners",
+ "thiserror",
"timeago",
"tokio",
]
@@ -1255,6 +1259,15 @@ dependencies = [
]
[[package]]
+name = "sigpipe"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5584bfb3e0d348139d8210285e39f6d2f8a1902ac06de343e06357d1d763d8e6"
+dependencies = [
+ "libc",
+]
+
+[[package]]
name = "siphasher"
version = "0.3.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/Cargo.toml b/Cargo.toml
index d9d2c23..5569ed3 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -7,13 +7,16 @@ edition = "2021"
[dependencies]
json = "*"
-tokio = { version = "1", features=["full"] }
-reqwest = "0.11"
+tokio = { version="1", features = ["full"] }
+reqwest = { version="0.11", features=["json"]}
futures = "0.3"
colored = "2"
ptree = "0.4"
humantime = "2.1.0"
-clap = { version="4", features=["string"] }
+clap = { version="4", features=["string", "cargo"] }
timeago = "0.4"
chrono = "0.4"
spinners = "4.1"
+thiserror = "1.0"
+serde = "1.0"
+sigpipe = "0.1.3"
diff --git a/README.md b/README.md
index e3aab55..10785af 100644
--- a/README.md
+++ b/README.md
@@ -6,6 +6,14 @@
Small CLI tool for fetching Gitlab pipeline states and other info.
+## How to install
+
+1. clone this repository
+2. run `cargo install --path=.`
+
+or download prebuilt binaries for `amd64`
+[here](https://gitlab.com/imn1/glp/-/packages/).
+
## How to use
App uses following data sources:
@@ -20,20 +28,17 @@ App uses following data sources:
## Example usage
```
$ GLP_PRIVATE_TOKEN=123 glp 456 # fetches pipelines for project with ID 456
+$ echo "456" > .glp; glp --trace=-1 # fetches traces for all failed jobs from the most recent pipeline
+$ echo "456" > .glp; glp --trace=789 | less # fetches traces for all failed jobs from pipeline 789
```
-## How to install
-
-1. clone this repository
-2. run `cargo install --path=.`
-
-or download prebuild binaries for `amd64`
-[here](https://gitlab.com/imn1/glp/-/packages/).
-
## Changelog
-### 0.1.3 (unreleased)
+### 0.1.3
- spinner added
+- tracing added (--trace) - possibility to fetch failed jobs traces (logs)
+- inner refactoring
+- much better error handling
### 0.1.2
- space between pipelines added
diff --git a/src/app.rs b/src/app.rs
new file mode 100644
index 0000000..b0254d9
--- /dev/null
+++ b/src/app.rs
@@ -0,0 +1,135 @@
+use crate::{
+ job::{Job, JobError},
+ pipeline::{Pipeline, PipelineError},
+ stage::Stage,
+};
+use std::{error::Error, sync::Arc};
+
+use crate::{config::Config, ui::Ui};
+
+pub struct App {
+ config: Arc<Config>,
+ ui: Ui,
+}
+
+impl App {
+ pub fn new(config: Arc<Config>, ui: Ui) -> Self {
+ Self { config, ui }
+ }
+
+ pub async fn run(&mut self) -> Result<(), Box<dyn Error + Send + Sync>> {
+ let result;
+
+ if let Some(trace) = &self.config.trace {
+ // 1. Test trace case.
+ if trace.starts_with('-') {
+ result = self
+ .trace_relatively(trace.chars().skip(1).collect::<String>().parse::<u8>()?)
+ .await;
+ } else {
+ result = self.trace_by_id(trace.to_string()).await;
+ }
+ } else if false {
+ // Other future cases.
+ unimplemented!()
+ } else {
+ // 2. Default behavior
+ result = self.run_without_arguments().await;
+ }
+
+ //...
+ match result {
+ Ok(_) => Ok(()),
+ Err(e) => {
+ self.ui.print_error(&e);
+ Err(e)
+ }
+ }
+ }
+
+ async fn run_without_arguments(&mut self) -> Result<(), Box<dyn Error + Send + Sync>> {
+ self.ui.start_spinner();
+
+ // 2. Fetch jobs for each running pipeline.
+ let pips = Pipeline::fetch_last(self.config.clone(), Some(self.config.limit)).await?;
+
+ // 3. print tree.
+ self.ui.stop_spinner();
+ self.ui.print_tree(pips);
+
+ Ok(())
+ }
+
+ async fn trace_relatively(&mut self, trace: u8) -> Result<(), Box<dyn Error + Send + Sync>> {
+ self.ui.start_spinner();
+ // Fetch last N (see trace param) pipelines and get the last one's stages.
+ let stages = match Pipeline::fetch_last(self.config.clone(), Some(trace))
+ .await?
+ .iter()
+ .last()
+ {
+ Some(pip) => pip.to_owned(),
+ None => return Err(Box::new(PipelineError::NotFound)),
+ }
+ .stages;
+ self.ui.stop_spinner();
+ self.print_job_traces(self.fetch_failed_jobs(stages).await?);
+
+ Ok(())
+ }
+
+ async fn trace_by_id(&self, pipeline_id: String) -> Result<(), Box<dyn Error + Send + Sync>> {
+ let stages = Pipeline::fetch_stages(self.config.clone(), pipeline_id).await?;
+
+ self.print_job_traces(self.fetch_failed_jobs(stages).await?);
+ Ok(())
+ }
+
+ async fn fetch_failed_jobs(
+ &self,
+ stages: Vec<Stage>,
+ ) -> Result<Vec<Job>, Box<dyn Error + Send + Sync>> {
+ let mut failed_jobs: Vec<Job> = vec![];
+
+ for stage in stages {
+ failed_jobs.append(
+ &mut self
+ .filter_failed_jobs_and_fetch_traces(stage.clone())
+ .await?,
+ );
+ }
+
+ if failed_jobs.is_empty() {
+ return Err(Box::new(PipelineError::NoFailedJobs));
+ }
+
+ Ok(failed_jobs)
+ }
+
+ async fn filter_failed_jobs_and_fetch_traces(
+ &self,
+ stage: Stage,
+ ) -> Result<Vec<Job>, JobError> {
+ // Get all failed jobs from all the stages and fetch trace logs.
+ let filtered_failed_jobs = stage
+ .jobs
+ .iter()
+ .filter(|job| job.is_failed())
+ .collect::<Vec<&Job>>();
+ let mut failed_jobs: Vec<Job> = vec![];
+
+ for job in filtered_failed_jobs {
+ let mut xjob = job.clone();
+ xjob.fetch_trace(self.config.clone()).await?;
+ failed_jobs.push(xjob);
+ }
+
+ Ok(failed_jobs)
+ }
+
+ fn print_job_traces(&self, jobs: Vec<Job>) {
+ for job in jobs {
+ self.ui.print_job_trace(job);
+ }
+ }
+}
diff --git a/src/args.rs b/src/args.rs
index 2a68c9b..b06acd6 100644
--- a/src/args.rs
+++ b/src/args.rs
@@ -1,15 +1,19 @@
-use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
+use clap::{crate_version, value_parser, Arg, ArgAction, ArgMatches, Command};
+
+pub const DEFAULT_LIMIT: u8 = 3;
pub fn parse() -> ArgMatches {
Command::new("glp")
.author("Hrdina Pavel <hrdina.pavel@gmail.com>")
.about("Gitlab pipeline status for command line.")
+ .version(crate_version!())
.arg(
Arg::new("project")
.short('p')
.long("project")
.action(ArgAction::Set)
- .value_parser(value_parser!(String)),
+ .value_parser(value_parser!(String))
+ .help("GitLab project ID."),
)
.arg(
Arg::new("limit")
@@ -17,13 +21,24 @@ pub fn parse() -> ArgMatches {
.long("limit")
.action(ArgAction::Set)
.value_parser(value_parser!(u8))
- .default_value(super::DEFAULT_LIMIT.to_string()),
+ .default_value(DEFAULT_LIMIT.to_string())
+ .help("Limit number of fetched pipelines."),
)
.arg(
Arg::new("finished")
.short('f')
.long("finished")
- .action(ArgAction::SetTrue),
+ .action(ArgAction::SetTrue)
+ .help("Care only about finished pipelines."),
+ )
+ .arg(
+ Arg::new("trace")
+ .long("trace")
+ .action(ArgAction::Set)
+ .value_parser(value_parser!(String))
+ .value_name("pipeline")
+ .conflicts_with("limit")
+            .help("Fetches traces (logs) from all failed pipeline jobs. Specify pipeline ID or its relative number with '-' prefix."),
)
.get_matches()
}
diff --git a/src/config.rs b/src/config.rs
new file mode 100644
index 0000000..aa7f03f
--- /dev/null
+++ b/src/config.rs
@@ -0,0 +1,39 @@
+use clap::ArgMatches;
+use std::env;
+use tokio::fs;
+
+pub struct Config {
+ pub project_id: String,
+ pub private_token: String,
+ pub limit: u8,
+ pub finished: bool,
+ pub trace: Option<String>,
+ _args: ArgMatches,
+}
+
+impl Config {
+ pub async fn new(args: ArgMatches) -> Self {
+ // Project ID.
+ let project_id = match args.get_one::<String>("project") {
+ Some(id) => id.to_owned(),
+ None => fs::read_to_string(".glp")
+ .await
+                .expect("No project ID (no parameter nor .glp file)."),
+ };
+
+ let finished = args.get_one::<bool>("finished").unwrap().to_owned();
+ let private_token = env::var("GLP_PRIVATE_TOKEN")
+ .expect("No Gitlab private token found - set GLP_PRIVATE_TOKEN environment variable.");
+ let limit = args.get_one::<u8>("limit").unwrap().to_owned();
+ let trace = args.get_one::<String>("trace").cloned();
+
+ Self {
+ project_id,
+ private_token,
+ limit,
+ finished,
+ trace,
+ _args: args,
+ }
+ }
+}
diff --git a/src/job.rs b/src/job.rs
index 7e920ea..29aa1ae 100644
--- a/src/job.rs
+++ b/src/job.rs
@@ -1,10 +1,23 @@
+use crate::config::Config;
use crate::Label;
use humantime::format_duration;
use std::borrow::Cow;
use std::io;
+use std::sync::Arc;
use std::time::Duration;
+use thiserror::Error;
-#[derive(Debug, Clone)]
+#[derive(Error, Debug)]
+pub enum JobError {
+ // #[error("Job not found.")]
+ // NotFound,
+ #[error("Fetching error: {0:?}")]
+ FetchingError(#[source] reqwest::Error),
+ #[error("Parsing error: {0:?}.")]
+ ParsingError(#[source] reqwest::Error),
+}
+
+#[derive(Debug, Clone, Default)]
pub struct Job {
pub id: String,
pub name: Label,
@@ -13,6 +26,7 @@ pub struct Job {
pub stage: String,
pub started_at: Option<String>,
pub duration: Option<Duration>,
+ pub trace: Option<String>,
}
impl ptree::TreeItem for Job {
@@ -38,3 +52,31 @@ impl ptree::TreeItem for Job {
Cow::from(vec![])
}
}
+
+impl Job {
+ pub fn is_failed(&self) -> bool {
+ self.status == "failed"
+ }
+
+ pub async fn fetch_trace(&mut self, config: Arc<Config>) -> Result<(), JobError> {
+ // GET /projects/:id/jobs/:job_id/trace
+
+ let client = reqwest::Client::new();
+ self.trace = Some(
+ client
+ .get(format!(
+ "https://gitlab.com/api/v4/projects/{}/jobs/{}/trace",
+ &config.project_id, &self.id
+ ))
+ .header("PRIVATE-TOKEN", &config.private_token)
+ .send()
+ .await
+ .map_err(JobError::FetchingError)?
+ .text()
+ .await
+ .map_err(JobError::ParsingError)?,
+ );
+
+ Ok(())
+ }
+}
diff --git a/src/main.rs b/src/main.rs
index 3ff4ee5..6e9a2fb 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,30 +1,19 @@
mod args;
+
+mod app;
+mod config;
mod job;
mod pipeline;
mod stage;
mod ui;
-use crate::job::Job;
-use crate::pipeline::Pipeline;
-use crate::stage::Stage;
+use crate::app::App;
+use crate::config::Config;
use crate::ui::Ui;
-use std::cmp::Ordering;
-use std::collections::HashMap;
-use std::env;
use std::sync::Arc;
-use std::time::Duration;
use colored::*;
-use futures::future::join_all;
-use tokio::fs;
-use tokio::sync::Semaphore;
-
-const DEFAULT_LIMIT: u8 = 3;
-const SEMAPHORE_LIMIT: usize = 10;
-
-// trait Labelable {}
-
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Default)]
pub struct Label(String);
impl Label {
@@ -43,178 +32,17 @@ impl Label {
/// Takes following positional arguments:
/// - project ID
-#[tokio::main]
-pub async fn main() -> Result<(), Box<dyn std::error::Error>> {
- // 0. Parse arguments.
- let app_args = args::parse();
- let project_id = match app_args.get_one::<String>("project") {
- Some(id) => id.to_owned(),
- None => fs::read_to_string(".glp")
- .await
- .expect("No project ID (no parameter nor .glp file."),
- };
- let show_finished = app_args.get_one::<bool>("finished").unwrap().to_owned();
- let mut ui = Ui::from_args(app_args.clone());
-
- let private_token = env::var("GLP_PRIVATE_TOKEN")
- .expect("No Gitlab private token found - set GLP_PRIVATE_TOKEN environment variable.");
-
- // 1. Fetch pipelines.
- ui.start_spinner();
- let client = reqwest::Client::new();
- let response = client
- .get(format!(
- "https://gitlab.com/api/v4/projects/{}/pipelines?per_page={}",
- project_id,
- app_args.get_one::<u8>("limit").unwrap()
- ))
- .header("PRIVATE-TOKEN", &private_token)
- .send()
- .await?
- .text()
- .await?;
-
- let pipelines = json::parse(&response)?;
-
- // 2. Fetch jobs for each running pipeline.
- let mut tasks = vec![];
- let semaphore = Arc::new(Semaphore::new(SEMAPHORE_LIMIT));
-
- for i in 0..pipelines.len() {
- let pip = pipelines[i].clone();
- let private_token = private_token.clone();
- let project_id = project_id.clone();
-
- // Acquire semaphore lock.
- let semaphore_permit = semaphore.clone().acquire_owned().await.unwrap();
-
- tasks.push(tokio::spawn(async move {
- // Fetch jobs for current pipeline.
- let client = reqwest::Client::new();
- let response = client
- .get(format!(
- "https://gitlab.com/api/v4/projects/{}/pipelines/{}/jobs",
- project_id, pip["id"]
- ))
- .header("PRIVATE-TOKEN", &private_token)
- .send()
- .await
- .unwrap()
- .text()
- .await
- .unwrap();
-
- // println!(
- // "gitlab response: {}",
- // response.chars().take(50).collect::<String>()
- // );
-
- let jobs = json::parse(&response).unwrap();
- let mut stages: HashMap<String, Vec<Job>> = HashMap::new();
-
- for j in 0..jobs.len() {
- let job = &jobs[j];
- let pip_job = Job {
- id: job["id"].as_usize().unwrap().to_string(),
- name: Label(job["name"].as_str().unwrap().to_string()),
- status: job["status"].as_str().unwrap().to_string(),
- web_url: job["web_url"].as_str().unwrap().to_string(),
- stage: job["stage"].as_str().unwrap().to_string(),
- started_at: match job["started_at"].is_null() {
- true => None,
- false => Some(job["started_at"].as_str().unwrap().to_string()),
- },
- duration: match job["duration"].is_null() {
- true => None,
- false => Some(Duration::from_secs_f64(job["duration"].as_f64().unwrap())),
- },
- };
-
- if stages.contains_key(&pip_job.stage) {
- stages.get_mut(&pip_job.stage).unwrap().push(pip_job);
- } else {
- stages.insert(pip_job.stage.clone(), vec![pip_job]);
- }
- }
-
- let mut pip_stages = vec![];
-
- // Convert hashmap to vec of stages
- for (stage, jobs) in stages.into_iter() {
- pip_stages.push(Stage {
- name: Label(stage),
- jobs,
- });
- }
-
- // Sort stages by job "started_at" times.
- // None are always classifiead as "greater"
- // so they end up as "last".
- pip_stages.sort_by(|a, b| {
- let mut a_jobs = a
- .jobs
- .iter()
- .filter(|j| j.started_at.is_some())
- .collect::<Vec<&Job>>();
- a_jobs.sort_by_key(|j| j.started_at.clone());
-
- let mut b_jobs = b
- .jobs
- .iter()
- .filter(|j| j.started_at.is_some())
- .collect::<Vec<&Job>>();
- b_jobs.sort_by_key(|j| j.started_at.clone());
-
- let a_started_at = match a_jobs.get(0) {
- Some(j) => j.started_at.clone(),
- _ => None,
- };
- let b_started_at = match b_jobs.get(0) {
- Some(j) => j.started_at.clone(),
- _ => None,
- };
-
- if a_started_at.is_none() {
- return Ordering::Greater;
- }
- if b_started_at.is_none() {
- return Ordering::Less;
- }
-
- a_started_at.partial_cmp(&b_started_at).unwrap()
- });
-
- let mut pip = Pipeline {
- id: Label(pip["id"].as_usize().unwrap().to_string()),
- git_ref: pip["ref"].as_str().unwrap().to_string(),
- status: pip["status"].as_str().unwrap().to_string(),
- stages: pip_stages,
- show_finished,
- details: None,
- };
-
- // Fetch details only if needed.
- if show_finished {
- pip.fetch_details(&private_token, &project_id).await;
- }
-
- // Free acquired semaphore lock.
- drop(semaphore_permit);
-
- pip
- }));
- }
-
- let pips: Vec<_> = join_all(tasks)
- .await
- .into_iter()
- .map(|i| i.unwrap())
- .collect();
+#[tokio::main]
+pub async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
+ // Fixes "failed printing to stdout: Broken pipe" known as sigpipe issue
+ // https://github.com/rust-lang/rust/issues/97889
+ sigpipe::reset();
- // 3. print tree.
- ui.stop_spinner();
- ui.print_tree(pips);
+ // Create config and UI instances.
+ let config = Arc::new(Config::new(args::parse()).await);
+ let ui = Ui::from_args(config.clone());
- Ok(())
+ // Run the app.
+ App::new(config, ui).run().await
}
diff --git a/src/pipeline.rs b/src/pipeline.rs
index 87b1a8e..d4a95f6 100644
--- a/src/pipeline.rs
+++ b/src/pipeline.rs
@@ -1,11 +1,34 @@
+use crate::args::DEFAULT_LIMIT;
+use crate::config::Config;
+use crate::job::Job;
use crate::stage::Stage;
use crate::Label;
use chrono::{offset::Local, DateTime};
+use futures::future::try_join_all;
use humantime::format_duration;
use json::JsonValue;
use std::borrow::Cow;
+use std::cmp::Ordering;
+use std::collections::HashMap;
use std::io;
+use std::sync::Arc;
use std::time::Duration;
+use thiserror::Error;
+use tokio::sync::Semaphore;
+
+const SEMAPHORE_LIMIT: usize = 10;
+
+#[derive(Error, Debug)]
+pub enum PipelineError {
+ #[error("Pipeline not found.")]
+ NotFound,
+ #[error("No failed jobs found.")]
+ NoFailedJobs,
+ #[error("Fetching error.")]
+ FetchingError,
+ #[error("Parsing error.")]
+ ParsingError,
+}
/// Represents Gitlab pipeline.
#[derive(Debug, Clone)]
@@ -49,6 +72,203 @@ impl ptree::TreeItem for Pipeline {
}
impl Pipeline {
+ /// Fetches last N (see limit param) pipelines.
+ async fn fetch_last_pipelines(
+ private_token: &str,
+ project_id: &str,
+ limit: &u8,
+ ) -> Result<JsonValue, PipelineError> {
+ // Fetch pipelines.
+ let client = reqwest::Client::new();
+ json::parse(
+ client
+ .get(format!(
+ "https://gitlab.com/api/v4/projects/{}/pipelines?per_page={}",
+ project_id, limit
+ ))
+ .header("PRIVATE-TOKEN", private_token)
+ .send()
+ .await
+ .map_err(|_| PipelineError::FetchingError)?
+ .text()
+ .await
+ .map_err(|_| PipelineError::FetchingError)?
+ .as_str(),
+ )
+ .map_err(|_| PipelineError::ParsingError)
+ }
+
+ // Fetch stages + jobs for the given pipeline.
+ pub async fn fetch_stages(
+ config: Arc<Config>,
+ pipeline_id: String,
+ ) -> Result<Vec<Stage>, PipelineError> {
+ // Fetch data.
+ let client = reqwest::Client::new();
+ let jobs = json::parse(
+ client
+ .get(format!(
+ "https://gitlab.com/api/v4/projects/{}/pipelines/{}/jobs",
+ &config.project_id, &pipeline_id
+ ))
+ .header("PRIVATE-TOKEN", &config.private_token)
+ .send()
+ .await
+ .map_err(|_| PipelineError::FetchingError)?
+ .text()
+ .await
+ .map_err(|_| PipelineError::FetchingError)?
+ .as_str(),
+ )
+ .map_err(|_| PipelineError::FetchingError)?;
+
+ // Check for errors.
+ if jobs.has_key("error") {
+ return Err(PipelineError::NotFound);
+ }
+
+ // Process JSON response.
+ // Construct jobs.
+ let mut stages: HashMap<String, Vec<Job>> = HashMap::new();
+
+ for j in 0..jobs.len() {
+ let job = &jobs[j];
+
+ let pip_job = Job {
+ id: job["id"].as_usize().unwrap().to_string(),
+ name: Label(job["name"].as_str().unwrap().to_string()),
+ status: job["status"].as_str().unwrap().to_string(),
+ web_url: job["web_url"].as_str().unwrap().to_string(),
+ stage: job["stage"].as_str().unwrap().to_string(),
+ started_at: match job["started_at"].is_null() {
+ true => None,
+ false => Some(job["started_at"].as_str().unwrap().to_string()),
+ },
+ duration: match job["duration"].is_null() {
+ true => None,
+ false => Some(Duration::from_secs_f64(job["duration"].as_f64().unwrap())),
+ },
+ ..Default::default()
+ };
+
+ if stages.contains_key(&pip_job.stage) {
+ stages.get_mut(&pip_job.stage).unwrap().push(pip_job);
+ } else {
+ stages.insert(pip_job.stage.clone(), vec![pip_job]);
+ }
+ }
+
+ let mut pip_stages = vec![];
+
+ // Convert hashmap to vec of stages
+ for (stage, jobs) in stages.into_iter() {
+ pip_stages.push(Stage {
+ name: Label(stage),
+ jobs,
+ });
+ }
+
+ // Sort stages by job "started_at" times.
+ // None are always classified as "greater"
+ // so they end up as "last".
+ pip_stages.sort_by(|a, b| {
+ let mut a_jobs = a
+ .jobs
+ .iter()
+ .filter(|j| j.started_at.is_some())
+ .collect::<Vec<&Job>>();
+ a_jobs.sort_by_key(|j| j.started_at.clone());
+
+ let mut b_jobs = b
+ .jobs
+ .iter()
+ .filter(|j| j.started_at.is_some())
+ .collect::<Vec<&Job>>();
+ b_jobs.sort_by_key(|j| j.started_at.clone());
+
+ let a_started_at = match a_jobs.get(0) {
+ Some(j) => j.started_at.clone(),
+ _ => None,
+ };
+ let b_started_at = match b_jobs.get(0) {
+ Some(j) => j.started_at.clone(),
+ _ => None,
+ };
+
+ if a_started_at.is_none() {
+ return Ordering::Greater;
+ }
+ if b_started_at.is_none() {
+ return Ordering::Less;
+ }
+
+ a_started_at.partial_cmp(&b_started_at).unwrap()
+ });
+
+ Ok(pip_stages)
+ }
+
+ /// ...
+ pub async fn fetch_last(
+ config: Arc<Config>,
+ limit: Option<u8>,
+ ) -> Result<Vec<Self>, PipelineError> {
+ let pipelines = Self::fetch_last_pipelines(
+ &config.private_token,
+ &config.project_id,
+ &limit.unwrap_or(DEFAULT_LIMIT),
+ )
+ .await?;
+
+ // 2. Fetch jobs for each running pipeline.
+ let mut tasks = vec![];
+ let semaphore = Arc::new(Semaphore::new(SEMAPHORE_LIMIT));
+
+ for i in 0..pipelines.len() {
+ let pip = pipelines[i].clone();
+
+ // Acquire semaphore lock.
+ let semaphore_permit = semaphore.clone().acquire_owned().await.unwrap();
+ let xconfig = config.clone();
+
+ tasks.push(tokio::spawn(async move {
+ let pip_stages =
+ Self::fetch_stages(xconfig.clone(), pip["id"].clone().to_string()).await?;
+
+ let mut pip = Pipeline {
+ id: Label(pip["id"].as_usize().unwrap().to_string()),
+ git_ref: pip["ref"].as_str().unwrap().to_string(),
+ status: pip["status"].as_str().unwrap().to_string(),
+ stages: pip_stages,
+ show_finished: xconfig.finished,
+ details: None,
+ };
+
+ // Fetch details only if needed.
+ if xconfig.finished {
+ pip.fetch_details(&xconfig.private_token, &xconfig.project_id)
+ .await?;
+ }
+
+ // Free acquired semaphore lock.
+ drop(semaphore_permit);
+
+ // We need to explicitly tell the return types here.
+ Ok::<Pipeline, PipelineError>(pip)
+ }));
+ }
+
+ // Result gets compiled from collect (FromIterator).
+ // Once any task contains an Err() instead of Ok()
+ // the whole collect fails (and maps to PipelineError from
+ // the method definition).
+ try_join_all(tasks)
+ .await
+ .map_err(|_| PipelineError::FetchingError)?
+ .into_iter()
+ .collect()
+ }
+
fn is_finished(&self) -> bool {
"success" == self.status || "failed" == self.status
}
@@ -72,23 +292,33 @@ impl Pipeline {
}
/// Fetches pipeline details from Gitlab API.
- pub async fn fetch_details(&mut self, private_token: &str, project_id: &str) {
+ pub async fn fetch_details(
+ &mut self,
+ private_token: &str,
+ project_id: &str,
+ ) -> Result<(), PipelineError> {
// Fetch jobs for current pipeline.
let client = reqwest::Client::new();
- let response = client
- .get(format!(
- "https://gitlab.com/api/v4/projects/{}/pipelines/{}",
- project_id, &self.id.0
- ))
- .header("PRIVATE-TOKEN", private_token)
- .send()
- .await
- .unwrap()
- .text()
- .await
- .unwrap();
+ self.details = Some(
+ json::parse(
+ client
+ .get(format!(
+ "https://gitlab.com/api/v4/projects/{}/pipelines/{}",
+ project_id, &self.id.0
+ ))
+ .header("PRIVATE-TOKEN", private_token)
+ .send()
+ .await
+ .map_err(|_| PipelineError::FetchingError)?
+ .text()
+ .await
+ .map_err(|_| PipelineError::FetchingError)?
+ .as_str(),
+ )
+ .map_err(|_| PipelineError::ParsingError)?,
+ );
- self.details = Some(json::parse(&response).unwrap());
+ Ok(())
}
/// Calculates (if available) relative time when the
diff --git a/src/ui.rs b/src/ui.rs
index 4c2316f..709eda6 100644
--- a/src/ui.rs
+++ b/src/ui.rs
@@ -1,21 +1,22 @@
+use std::error::Error;
use std::io::{self, Write};
+use std::sync::Arc;
-use clap::ArgMatches;
use spinners::{Spinner, Spinners};
+use crate::config::Config;
+use crate::job::Job;
use crate::pipeline::Pipeline;
-struct UiConfig;
-
pub struct Ui {
- config: UiConfig,
+ _config: Arc<Config>,
spinner: Option<Spinner>,
}
impl Ui {
- pub fn from_args(_args: ArgMatches) -> Self {
+ pub fn from_args(config: Arc<Config>) -> Self {
Self {
- config: UiConfig {},
+ _config: config,
spinner: None,
}
}
@@ -38,12 +39,34 @@ impl Ui {
}
pub fn stop_spinner(&mut self) {
- match self.spinner.as_mut() {
- Some(spinner) => {
- spinner.stop();
- print!("\x1b[2K\r");
- }
- _ => (),
+ if let Some(spinner) = self.spinner.as_mut() {
+ spinner.stop();
+ print!("\x1b[2K\r");
};
}
+
+ /// Prints nicely job trace (log) with a header.
+ pub fn print_job_trace(&self, job: Job) {
+ let no_color_title = format!("Job: {} ID: {} URL: {}", &job.name.0, &job.id, &job.web_url);
+ let title = format!(
+ "Job: {} ID: {} URL: {}",
+ &job.name.to_string(&job.status),
+ &job.id,
+ &job.web_url
+ );
+ let size = no_color_title.chars().count();
+ println!("{}", "-".repeat(size));
+ println!("{}", title);
+ println!("{}", "-".repeat(size));
+ println!("{}", &job.trace.unwrap_or("No trace (yet) ...".to_string()));
+ println!("{}", "-".repeat(size));
+ }
+
+ pub fn print_error(&self, err: &Box<dyn Error + Send + Sync>) {
+ eprintln!("Error: {}", err);
+
+ if let Some(source) = err.source() {
+ eprintln!("Original error: {}", source);
+ }
+ }
}