Skip to content
Snippets Groups Projects
Verified Commit bdc6df2f authored by Rafael Epplée
Browse files

Switch to alpm-srcinfo

parent 9f3c2649
No related branches found
No related tags found
No related merge requests found
Pipeline #123276 passed
......@@ -32,6 +32,45 @@ version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]]
name = "alpm-srcinfo"
version = "0.1.0"
source = "git+https://gitlab.archlinux.org/archlinux/alpm/alpm.git?branch=main#9518749c385c2ee2d865a08c61be88ba53b040ca"
dependencies = [
"alpm-types",
"clap",
"colored",
"serde",
"serde_json",
"strum",
"thiserror 2.0.11",
"winnow",
]
[[package]]
name = "alpm-types"
version = "0.5.0"
source = "git+https://gitlab.archlinux.org/archlinux/alpm/alpm.git?branch=main#9518749c385c2ee2d865a08c61be88ba53b040ca"
dependencies = [
"blake2",
"digest",
"email_address",
"lazy-regex",
"md-5",
"regex",
"semver",
"serde",
"serde_json",
"sha1",
"sha2",
"spdx",
"strum",
"thiserror 2.0.11",
"time",
"url",
"winnow",
]
[[package]]
name = "android-tzdata"
version = "0.1.1"
......@@ -361,6 +400,15 @@ dependencies = [
"serde",
]
[[package]]
name = "blake2"
version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe"
dependencies = [
"digest",
]
[[package]]
name = "block-buffer"
version = "0.10.4"
......@@ -374,6 +422,8 @@ dependencies = [
name = "buildbtw"
version = "0.1.0"
dependencies = [
"alpm-srcinfo",
"alpm-types",
"anyhow",
"axum 0.8.1",
"axum-extra",
......@@ -397,7 +447,6 @@ dependencies = [
"serde",
"serde_json",
"sqlx",
"srcinfo",
"time",
"tokio",
"tracing",
......@@ -554,7 +603,7 @@ version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e"
dependencies = [
"windows-sys 0.59.0",
"windows-sys 0.48.0",
]
[[package]]
......@@ -901,6 +950,15 @@ dependencies = [
"serde",
]
[[package]]
name = "email_address"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e079f19b08ca6239f47f8ba8509c11cf3ea30095831f7fed61441475edd8c449"
dependencies = [
"serde",
]
[[package]]
name = "encoding_rs"
version = "0.8.35"
......@@ -923,7 +981,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d"
dependencies = [
"libc",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
......@@ -1783,6 +1841,29 @@ dependencies = [
"log",
]
[[package]]
name = "lazy-regex"
version = "3.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60c7310b93682b36b98fa7ea4de998d3463ccbebd94d935d6b48ba5b6ffa7126"
dependencies = [
"lazy-regex-proc_macros",
"once_cell",
"regex",
]
[[package]]
name = "lazy-regex-proc_macros"
version = "3.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ba01db5ef81e17eb10a5e0f2109d1b3a3e29bac3070fdbd7d156bf7dbd206a1"
dependencies = [
"proc-macro2",
"quote",
"regex",
"syn 2.0.98",
]
[[package]]
name = "lazy_static"
version = "1.5.0"
......@@ -2390,7 +2471,7 @@ dependencies = [
"once_cell",
"socket2",
"tracing",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
......@@ -2662,7 +2743,7 @@ dependencies = [
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
......@@ -2911,6 +2992,15 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "spdx"
version = "0.10.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58b69356da67e2fc1f542c71ea7e654a361a79c938e4424392ecf4fa065d2193"
dependencies = [
"smallvec",
]
[[package]]
name = "spin"
version = "0.9.8"
......@@ -3125,15 +3215,6 @@ dependencies = [
"uuid",
]
[[package]]
name = "srcinfo"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4514a89ef6fc82ff66e93012ada1aef67b8ee09489d745def742e26aa9cd6bdc"
dependencies = [
"serde",
]
[[package]]
name = "stable_deref_trait"
version = "1.2.0"
......@@ -3157,6 +3238,28 @@ version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "strum"
version = "0.27.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f64def088c51c9510a8579e3c5d67c65349dcf755e5479ad3d010aa6454e2c32"
dependencies = [
"strum_macros",
]
[[package]]
name = "strum_macros"
version = "0.27.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c77a8c5abcaf0f9ce05d62342b7d298c346515365c36b673df4ebe3ced01fde8"
dependencies = [
"heck 0.5.0",
"proc-macro2",
"quote",
"rustversion",
"syn 2.0.98",
]
[[package]]
name = "subtle"
version = "2.6.1"
......@@ -3237,7 +3340,7 @@ dependencies = [
"getrandom 0.3.1",
"once_cell",
"rustix",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
......@@ -3650,6 +3753,7 @@ dependencies = [
"form_urlencoded",
"idna",
"percent-encoding",
"serde",
]
[[package]]
......
......@@ -7,6 +7,8 @@ edition = "2021"
fake-pkgbuild = []
[dependencies]
alpm-srcinfo = { git = "https://gitlab.archlinux.org/archlinux/alpm/alpm.git", branch = "main" }
alpm-types = { git = "https://gitlab.archlinux.org/archlinux/alpm/alpm.git", branch = "main" }
anyhow = { version = "1.0.95", features = ["backtrace"] }
axum = { version = "0.8.1", features = ["http2", "macros"] }
axum-extra = "0.10.0"
......@@ -44,7 +46,6 @@ sqlx = { version = "0.8.3", features = [
"json",
"time",
] }
srcinfo = { version = "1.3.1", features = ["serde"] }
time = { version = "0.3.37", features = ["formatting", "parsing", "serde"] }
tokio = { version = "1.43.0", features = [
"fs",
......
......@@ -94,7 +94,7 @@ async fn update_and_build_all_namespaces(
// Check all build namespaces and see if they need a new iteration.
let namespaces = db::namespace::list(pool).await?;
let namespace_count = namespaces.len();
tracing::info!("Updating and building {namespace_count} namespace(s)...");
tracing::info!("Updating and dispatching builds for {namespace_count} namespace(s)...");
for namespace in namespaces {
create_new_namespace_iteration_if_needed(pool, &namespace).await?;
if let Some(gitlab_client) = maybe_gitlab_client {
......@@ -102,6 +102,7 @@ async fn update_and_build_all_namespaces(
}
schedule_next_build_if_needed(pool, &namespace, maybe_gitlab_client).await?;
}
tracing::info!("Updated and dispatched builds");
Ok(())
}
......@@ -319,10 +320,7 @@ async fn schedule_build(
build: &ScheduleBuild,
maybe_gitlab_client: Option<&AsyncGitlab>,
) -> Result<()> {
tracing::info!(
"Building pending package for namespace: {:?}",
build.srcinfo.base.pkgbase
);
tracing::info!("Building pending package: {:?}", build.source);
if let Some(client) = maybe_gitlab_client {
let pipeline_response = buildbtw::gitlab::create_pipeline(client).await?;
......
......@@ -15,11 +15,11 @@ pub fn start(modify_gpg_keyring: bool) -> UnboundedSender<Message> {
while let Some(msg) = receiver.recv().await {
match msg {
Message::BuildPackage(schedule) => {
tracing::info!("🕑 Building package {:?}", schedule.srcinfo.base.pkgbase);
tracing::info!("🕑 Building package {:?}", schedule.source.0);
let result_status = build_package(&schedule, modify_gpg_keyring).await;
tracing::info!(
"✅ build finished for {:?} ({result_status:?})",
schedule.srcinfo.base.pkgbase
schedule.source.0
);
// TODO: exponential backoff
......
//! Build a package locally by essentially running `pkgctl build`.
use std::process::Stdio;
use std::{collections::HashSet, process::Stdio};
use anyhow::anyhow;
use camino::{Utf8Path, Utf8PathBuf};
......@@ -131,7 +131,7 @@ async fn checkout_build_git_ref(path: &Utf8Path, schedule: &ScheduleBuild) -> Re
// Replace the real PKGBUILD with a fake PKGBUILD to speed up compilation during testing.
if cfg!(feature = "fake-pkgbuild") {
let pkgbuild = generate_fake_pkgbuild(schedule);
let pkgbuild = generate_fake_pkgbuild(schedule)?;
let pkgbuild_path = path.join("PKGBUILD");
tracing::info!("Writing fake PKGBUILD to {pkgbuild_path}");
fs::write(pkgbuild_path, pkgbuild).await?;
......@@ -140,22 +140,31 @@ async fn checkout_build_git_ref(path: &Utf8Path, schedule: &ScheduleBuild) -> Re
Ok(())
}
fn generate_fake_pkgbuild(schedule: &ScheduleBuild) -> String {
fn generate_fake_pkgbuild(schedule: &ScheduleBuild) -> Result<String> {
let srcinfo = schedule.srcinfo.get_source_info()?;
let pkgnames = format!(
"({})",
schedule
.srcinfo
.pkgs
srcinfo
.packages
.iter()
.map(|pkg| pkg.pkgname.clone())
.map(|pkg| pkg.name.as_ref())
.collect::<Vec<_>>()
.join(" ")
);
// Generate stub package_foo() functions
let mut package_funcs = String::new();
for pkg in &schedule.srcinfo.pkgs {
let pkgarchs = format!("({})", pkg.arch.to_vec().join(" "));
for pkg in &srcinfo.packages {
let pkgarchs = format!(
"({})",
pkg.architectures
.as_ref()
.unwrap_or(&HashSet::new())
.iter()
.map(|a| a.to_string())
.collect::<Vec<_>>()
.join(" ")
);
let func = format!(
r#"
......@@ -164,32 +173,44 @@ package_{pkgname}() {{
echo 1
}}
"#,
pkgname = pkg.pkgname,
pkgname = pkg.name,
pkgarch = pkgarchs,
);
package_funcs.push_str(&func);
}
format!(
Ok(format!(
r#"
pkgbase={pkgbase}
pkgname={pkgname}
pkgver={pkgver}
pkgrel={pkgrel}
pkgdesc=dontcare
arch=(any)
arch=({arch})
license=('Apache-2.0')
url=https://example.com
source=()
{package_funcs}
"#,
pkgbase = schedule.srcinfo.base.pkgbase,
pkgbase = srcinfo.base.name,
pkgname = pkgnames,
pkgver = schedule.srcinfo.base.pkgver,
pkgrel = schedule.srcinfo.base.pkgrel,
)
pkgver = srcinfo.base.version.pkgver,
pkgrel = srcinfo
.base
.version
.pkgrel
.map(|r| r.to_string())
.unwrap_or("1".to_string()),
arch = srcinfo
.base
.architectures
.iter()
.map(|a| a.to_string())
.collect::<Vec<_>>()
.join(" ")
))
}
/// Return file paths for dependencies that were built in a previous step
......
......@@ -2,13 +2,13 @@
use std::collections::{HashSet, VecDeque};
use std::{collections::HashMap, fs::read_dir};
use alpm_srcinfo::SourceInfo;
use anyhow::{anyhow, Context, Result};
use git2::Repository;
use petgraph::visit::{Bfs, EdgeRef, Walker};
use petgraph::Directed;
use petgraph::{graph::NodeIndex, prelude::StableGraph, Graph};
use serde::{Deserialize, Serialize};
use srcinfo::Srcinfo;
use tokio::task::spawn_blocking;
use uuid::Uuid;
......@@ -16,14 +16,13 @@ use crate::git::{get_branch_commit_sha, package_source_path, read_srcinfo_from_r
use crate::{
BuildNamespace, BuildPackageOutput, BuildSetIteration, GitRef, GitRepoRef,
PackageBuildDependency, PackageBuildStatus, Pkgbase, Pkgname, ScheduleBuild,
ScheduleBuildResult,
ScheduleBuildResult, SourceInfoString,
};
/// For tracking dependencies between individual packages.
///
/// Nodes of the global dependent graph; looked up by `pkgname` through a
/// pkgname -> NodeIndex map built alongside the graph.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct PackageNode {
    // Name of the individual package (pkgname, not pkgbase).
    pub pkgname: String,
    // NOTE(review): the surrounding diff's line counts suggest `commit_hash`
    // is being removed from this struct (the commit sha now travels in the
    // pkgname->srcinfo map tuple instead) — confirm against the full file.
    pub commit_hash: String,
}
/// Like PackageNode, but for a single PKGBUILD,
......@@ -33,7 +32,7 @@ pub struct BuildPackageNode {
pub pkgbase: String,
pub commit_hash: String,
pub status: PackageBuildStatus,
pub srcinfo: Srcinfo,
pub srcinfo: SourceInfoString,
/// Packages that this build will emit
pub build_outputs: Vec<BuildPackageOutput>,
}
......@@ -46,6 +45,10 @@ pub struct BuildPackageNode {
pub type BuildSetGraph = Graph<BuildPackageNode, PackageBuildDependency, Directed>;
pub async fn calculate_packages_to_be_built(namespace: &BuildNamespace) -> Result<BuildSetGraph> {
tracing::debug!(
"Calculating packages to be built for namespace: {}",
namespace.name
);
let pkgname_to_srcinfo_map =
build_pkgname_to_srcinfo_map(namespace.current_origin_changesets.clone())
.await
......@@ -62,7 +65,7 @@ pub async fn calculate_packages_to_be_built(namespace: &BuildNamespace) -> Resul
)
.await;
tracing::info!("Build set graph calculated");
tracing::debug!("Build set graph calculated");
packages
}
......@@ -70,9 +73,10 @@ pub async fn calculate_packages_to_be_built(namespace: &BuildNamespace) -> Resul
async fn calculate_packages_to_be_built_inner(
namespace: &BuildNamespace,
global_graph: &StableGraph<PackageNode, PackageBuildDependency>,
pkgname_to_srcinfo_map: &HashMap<Pkgname, (Srcinfo, GitRef)>,
pkgname_to_srcinfo_map: &HashMap<Pkgname, (SourceInfoString, SourceInfo, GitRef)>,
pkgname_to_node_index: &HashMap<Pkgname, NodeIndex>,
) -> Result<BuildSetGraph> {
tracing::debug!("Collecting reverse dependencies for rebuild");
// We have the global graph. Based on this, find the precise graph of dependents for the
// given Pkgbases.
let mut packages_to_be_built: BuildSetGraph = Graph::new();
......@@ -88,9 +92,9 @@ async fn calculate_packages_to_be_built_inner(
// add root nodes from our build namespace so we can start walking the graph
for (pkgbase, branch) in &namespace.current_origin_changesets {
let repo = Repository::open(package_source_path(pkgbase))?;
let srcinfo = read_srcinfo_from_repo(&repo, branch)?;
for package in srcinfo.pkgs {
let pkgname = package.pkgname;
let srcinfo = read_srcinfo_from_repo(&repo, branch)?.get_source_info()?;
for package in srcinfo.packages {
let pkgname = package.name.to_string();
let node_index = pkgname_to_node_index
.get(&pkgname)
.ok_or_else(|| anyhow!("Failed to get index for pkgname {pkgname}"))?;
......@@ -105,10 +109,10 @@ async fn calculate_packages_to_be_built_inner(
let package_node = global_graph
.node_weight(global_node_index_to_visit)
.ok_or_else(|| anyhow!("Failed to find node in global dependency graph"))?;
let (srcinfo, _) = pkgname_to_srcinfo_map
let (srcinfo_string, source_info, commit_hash) = pkgname_to_srcinfo_map
.get(&package_node.pkgname)
.ok_or_else(|| anyhow!("Failed to get srcinfo for pkgname {}", package_node.pkgname))?;
let pkgbase = srcinfo.base.pkgbase.clone();
let pkgbase = source_info.base.name.to_string();
// Create build graph node if it doesn't exist
let build_graph_node_index =
......@@ -126,20 +130,23 @@ async fn calculate_packages_to_be_built_inner(
.any(|p| p == &pkgbase);
// Add this node to the buildset graph
let build_outputs = srcinfo
.pkgs
let build_outputs = source_info
.packages
.iter()
.map(|pkg| BuildPackageOutput {
pkgbase: srcinfo.base.pkgbase.clone(),
pkgname: pkg.pkgname.clone(),
arch: pkg.arch.clone(),
version: srcinfo.version(),
pkgbase: source_info.base.name.to_string(),
pkgname: pkg.name.to_string(),
arch: pkg
.architectures
.clone()
.map(|set| set.iter().map(|a| a.to_string()).collect()),
version: source_info.base.version.to_string(),
})
.collect();
let build_graph_node_index = packages_to_be_built.add_node(BuildPackageNode {
pkgbase: pkgbase.clone(),
commit_hash: package_node.commit_hash.clone(),
srcinfo: srcinfo.clone(),
commit_hash: commit_hash.clone(),
srcinfo: srcinfo_string.clone(),
status: match is_root_node {
true => PackageBuildStatus::Pending,
false => PackageBuildStatus::Blocked,
......@@ -181,9 +188,11 @@ async fn calculate_packages_to_be_built_inner(
pub async fn build_pkgname_to_srcinfo_map(
origin_changesets: Vec<GitRepoRef>,
) -> Result<HashMap<Pkgbase, (Srcinfo, GitRef)>> {
) -> Result<HashMap<Pkgname, (SourceInfoString, SourceInfo, GitRef)>> {
tracing::debug!("Building pkgname->srcinfo map");
spawn_blocking(move || {
let mut pkgname_to_srcinfo_map: HashMap<Pkgbase, (Srcinfo, GitRef)> = HashMap::new();
let mut pkgname_to_srcinfo_map: HashMap<Pkgname, (SourceInfoString, SourceInfo, GitRef)> =
HashMap::new();
// TODO: parallelize
for dir in read_dir("./source_repos")? {
......@@ -202,11 +211,22 @@ pub async fn build_pkgname_to_srcinfo_map(
"Failed to read .SRCINFO from repo at {:?}",
dir.path()
)) {
Ok(srcinfo) => {
for package in &srcinfo.pkgs {
Ok(srcinfo_string) => {
for package in &srcinfo_string
.get_source_info()
.context(format!("{:?}", dir.path().to_str()))
.map(|s| s.packages)
// TODO this is a rather hackish way to swallow any errors here.
// restructure this and handle errors properly
.unwrap_or(Vec::new())
{
pkgname_to_srcinfo_map.insert(
package.pkgname.clone(),
(srcinfo.clone(), get_branch_commit_sha(&repo, "main")?),
package.name.to_string(),
(
srcinfo_string.clone(),
srcinfo_string.get_source_info()?,
get_branch_commit_sha(&repo, "main")?,
),
);
}
}
......@@ -226,35 +246,50 @@ pub async fn build_pkgname_to_srcinfo_map(
// Build a graph where nodes point towards their dependents, e.g.
// gzip -> sed
pub fn build_global_dependent_graph(
pkgname_to_srcinfo_map: &HashMap<Pkgname, (Srcinfo, GitRef)>,
pkgname_to_srcinfo_map: &HashMap<Pkgname, (SourceInfoString, SourceInfo, GitRef)>,
) -> Result<(
StableGraph<PackageNode, PackageBuildDependency>,
HashMap<Pkgname, NodeIndex>,
)> {
tracing::debug!("Building global dependency graph");
tracing::debug!("{} pkgnames", pkgname_to_srcinfo_map.len());
let mut global_graph: StableGraph<PackageNode, PackageBuildDependency> = StableGraph::new();
let mut pkgname_to_node_index_map: HashMap<Pkgname, NodeIndex> = HashMap::new();
// Add all nodes to the graph and build a map of pkgname -> node index
for (pkgname, (srcinfo, commit_hash)) in pkgname_to_srcinfo_map {
tracing::debug!("Adding package nodes");
for (pkgname, (_srcinfo_string, srcinfo, _)) in pkgname_to_srcinfo_map {
let index = global_graph.add_node(PackageNode {
pkgname: pkgname.clone(),
commit_hash: commit_hash.clone(),
});
pkgname_to_node_index_map.insert(pkgname.clone(), index);
// Add every "provides" value to the index map as well
let srcinfo_package = srcinfo
.pkg(pkgname)
.ok_or_else(|| anyhow!("Failed to look up package {pkgname} in srcinfo map"))?;
for provide_vec in &srcinfo_package.provides {
for provide in provide_vec.vec.clone() {
pkgname_to_node_index_map.insert(strip_pkgname_version_constraint(&provide), index);
for architecture in srcinfo.base.architectures.clone() {
let srcinfo_package = srcinfo
.packages_for_architecture(architecture)
.next()
.ok_or_else(|| anyhow!("Failed to look up package {pkgname} in srcinfo map"))?;
for provides in &srcinfo_package.provides {
match provides {
alpm_srcinfo::RelationOrSoname::Relation(package_relation) => {
pkgname_to_node_index_map.insert(
strip_pkgname_version_constraint(&package_relation.name.to_string()),
index,
);
}
// We can ignore sonames as we're only looking up pkgnames later on
alpm_srcinfo::RelationOrSoname::BasicSonameV1(_) => {}
}
}
}
}
// Add edges to the graph for every package that depends on another package
for (dependent_pkgname, (dependent_srcinfo, _commit_hash)) in pkgname_to_srcinfo_map {
tracing::debug!("Adding dependency edges");
for (dependent_pkgname, (_dependent_srcinfo_string, dependent_srcinfo, _commit_hash)) in
pkgname_to_srcinfo_map
{
// get graph index of the current package
let dependent_index = pkgname_to_node_index_map
.get(dependent_pkgname)
......@@ -262,30 +297,36 @@ pub fn build_global_dependent_graph(
"Failed to get node index for dependent pgkname: {dependent_pkgname}"
))?;
// get all dependencies of the current package
let dependencies = &dependent_srcinfo
.pkgs
.iter()
.find(|p| p.pkgname == dependent_pkgname.clone())
.context("Failed to get srcinfo for dependent pkgname")?
.depends;
for arch_vec in dependencies.iter() {
for architecture in dependent_srcinfo.base.architectures.clone() {
let merged_package = dependent_srcinfo
.packages_for_architecture(architecture)
.find(|p| p.name.to_string() == dependent_pkgname.clone())
.context("Failed to get srcinfo for dependent pkgname")?;
// Add edge between current package and its dependencies
for dependency in &arch_vec.vec {
let dependency = strip_pkgname_version_constraint(dependency);
match pkgname_to_node_index_map.get(&dependency).context(format!(
"Failed to get node index for dependency pkgname: {dependency}"
)) {
Ok(dependency_index) => {
global_graph.add_edge(
*dependency_index,
*dependent_index,
PackageBuildDependency {},
);
}
Err(_e) => {
// TODO there are some repos that error here,
// investigate and fix
// tracing::info!("⚠️ {e:#}");
for dependency in merged_package.dependencies {
match dependency {
// TODO we're currently ignoring soname-based dependencies.
// This might exclude some packages that need to be rebuilt
alpm_srcinfo::RelationOrSoname::BasicSonameV1(_) => {}
alpm_srcinfo::RelationOrSoname::Relation(package_relation) => {
let dependency =
strip_pkgname_version_constraint(&package_relation.name.to_string());
match pkgname_to_node_index_map.get(&dependency).context(format!(
"Failed to get node index for dependency pkgname: {dependency}"
)) {
Ok(dependency_index) => {
global_graph.add_edge(
*dependency_index,
*dependent_index,
PackageBuildDependency {},
);
}
Err(_e) => {
// TODO there are some repos that error here,
// investigate and fix
// tracing::info!("⚠️ {e:#}");
}
}
}
}
}
......@@ -375,7 +416,7 @@ pub fn schedule_next_build_in_graph(
pub struct DiffNode {
pub pkgbase: String,
pub commit_hash: String,
pub srcinfo: Srcinfo,
pub srcinfo: SourceInfoString,
/// Packages that this build will emit
pub build_outputs: Vec<BuildPackageOutput>,
}
......
use crate::{GitRef, Pkgbase, PkgbaseMaintainers};
use crate::{GitRef, Pkgbase, PkgbaseMaintainers, SourceInfoString};
use anyhow::{Context, Result};
use camino::Utf8PathBuf;
use git2::build::RepoBuilder;
use git2::{BranchType, FetchOptions, RemoteCallbacks, Repository};
use reqwest::Client;
use srcinfo::Srcinfo;
use std::path::Path;
use tokio::task::JoinSet;
......@@ -110,7 +109,7 @@ pub async fn retrieve_srcinfo_from_remote_repository(
branch: &GitRef,
gitlab_domain: String,
gitlab_packages_group: String,
) -> Result<Srcinfo> {
) -> Result<SourceInfoString> {
let repo =
clone_or_fetch_repository(pkgbase.clone(), gitlab_domain, gitlab_packages_group).await?;
......@@ -148,7 +147,7 @@ pub fn get_branch_commit_sha(repo: &Repository, branch: &str) -> Result<String>
Ok(branch.get().peel_to_commit()?.id().to_string())
}
pub fn read_srcinfo_from_repo(repo: &Repository, branch: &str) -> Result<Srcinfo> {
pub fn read_srcinfo_from_repo(repo: &Repository, branch: &str) -> Result<SourceInfoString> {
let branch = repo.find_branch(&format!("origin/{branch}"), BranchType::Remote)?;
let file_oid = branch
.get()
......@@ -160,7 +159,9 @@ pub fn read_srcinfo_from_repo(repo: &Repository, branch: &str) -> Result<Srcinfo
assert!(!file_blob.is_binary());
srcinfo::Srcinfo::parse_buf(file_blob.content()).context("Failed to parse .SRCINFO")
Ok(SourceInfoString(String::from_utf8(
file_blob.content().to_vec(),
)?))
}
pub fn package_source_path(pkgbase: &Pkgbase) -> Utf8PathBuf {
......
use std::collections::HashMap;
use alpm_srcinfo::SourceInfo;
use anyhow::Context;
use build_set_graph::BuildSetGraph;
use camino::Utf8PathBuf;
use clap::ValueEnum;
use iteration::NewIterationReason;
use serde::{Deserialize, Serialize};
use srcinfo::Srcinfo;
use uuid::Uuid;
pub mod build_package;
......@@ -40,12 +41,24 @@ pub struct UpdateBuildNamespace {
pub status: BuildNamespaceStatus,
}
/// Raw `.SRCINFO` file contents kept as an unparsed `String`.
///
/// A temporary workaround until `alpm-srcinfo` supports `Serialize` and
/// `Deserialize` for its [`SourceInfo`] struct: we serialize the plain text
/// and re-parse it on demand.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
pub struct SourceInfoString(String);

impl SourceInfoString {
    /// Parse the stored text into a [`SourceInfo`].
    ///
    /// # Errors
    /// Fails when the text is not valid `.SRCINFO` data or does not pass
    /// `alpm-srcinfo`'s semantic validation.
    fn get_source_info(&self) -> anyhow::Result<SourceInfo> {
        SourceInfo::from_string(&self.0)?
            .source_info()
            .context("Failed to parse SRCINFO")
    }
}
#[derive(Serialize, Deserialize, Debug)]
pub struct ScheduleBuild {
pub namespace: Uuid,
pub iteration: Uuid,
pub source: GitRepoRef,
pub srcinfo: Srcinfo,
pub srcinfo: SourceInfoString,
pub install_to_chroot: Vec<BuildPackageOutput>,
pub updated_build_set_graph: BuildSetGraph,
}
......@@ -91,7 +104,7 @@ pub struct BuildNamespace {
pub struct BuildPackageOutput {
pub pkgbase: Pkgbase,
pub pkgname: Pkgname,
pub arch: Vec<String>,
pub arch: Option<Vec<String>>,
/// Output of Srcinfo::version(), stored for convenience
pub version: String,
}
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.