#!/bin/bash
set -euo pipefail
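+# run from the script's own directory so the helper scripts can be invoked by relative path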
+cd "$(dirname "$0")"
+SCRIPT_DIR="$PWD"
+
echo "loading environment"
source "$SCRIPT_DIR/environment.sh"
-
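+# environment.sh may change the working directory, so return to the script directory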
+cd "$SCRIPT_DIR"
echo "cleaning ..."
- # will force download of sources:
- # bash "$SCRIPT_DIR/clean_dist.sh"
-
- bash "$SCRIPT_DIR/clean_build.sh"
+ # Run before a rebuild; skips source deletion
+ ./clean_build.sh
echo "setting up the project ..."
- bash "$SCRIPT_DIR/make_project_structure.sh"
+ # Creates directory structure; idempotent
+ ./make_project_structure.sh
echo "downloading and expanding upstream sources"
- bash "SCRIPT_DIR/download_expand_sources.sh"
+ # Downloads tarballs and clones repos
+ ./download_sources.sh
echo "building binutils"
-bash "$SCRIPT_DIR/build_binutils_requisites.sh"
-bash "$SCRIPT_DIR/build_binutils.sh"
+ ./build_binutils_requisites.sh
+ ./build_binutils.sh
echo "Step 3: glibc headers installed"
-# These provide just enough to bootstrap GCC Stage 1
-bash "$SCRIPT_DIR/build_linux_headers.sh"
-bash "$SCRIPT_DIR/prepare_glibc_sources.sh"
-bash "$SCRIPT_DIR/build_glibc_headers.sh"
-
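+ # Kernel and glibc headers: just enough to bootstrap GCC Stage 1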
+ ./build_linux_headers.sh
+ ./prepare_glibc_sources.sh
+ ./build_glibc_headers.sh
echo "Step 4: GCC Stage 1"
-bash "$SCRIPT_DIR/build_gcc_stage1_requisites.sh"
-bash "$SCRIPT_DIR/build_gcc_stage1.sh"
+ ./build_gcc_stage1_requisites.sh
+ ./build_gcc_stage1.sh
echo "Step 5: Build glibc (full libc build)"
-bash "$SCRIPT_DIR/build_glibc_requisites.sh"
-bash "$SCRIPT_DIR/build_glibc.sh"
-
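+ # Full glibc built with the Stage 1 cross compiler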
+ ./build_glibc_requisites.sh
+ ./build_glibc.sh
echo "Step 6: Final GCC"
-bash "$SCRIPT_DIR/build_gcc_final_requisites.sh"
-bash "$SCRIPT_DIR/build_gcc_final.sh"
-
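+ # Final GCC built against the completed glibc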
+ ./build_gcc_final_requisites.sh
+ ./build_gcc_final.sh
-echo "🎉 Toolchain build complete!"
+echo "✅ Toolchain build complete"
"$TOOLCHAIN/bin/gcc" --version
#!/bin/bash
+# Removes only the expanded trees of the upstream tarballs (under $SRC); the tarballs themselves are kept.
+# Cloned Git repositories are removed by clean_upstream.sh.
+
set -euo pipefail
+
source "$(dirname "$0")/environment.sh"
-# Clean source expansions (but keep tarballs)
-rm -rf "$LINUX_SRC" "$BINUTILS_SRC" "$GCC_SRC" "$GLIBC_SRC"
+i=0
+while [ $i -lt ${#UPSTREAM_TARBALL_LIST[@]} ]; do
+  tarball="${UPSTREAM_TARBALL_LIST[$i]}"
+  # the url and explicit dest dir entries are not needed here
+
+  base_name="${tarball%.tar.*}"
+  dir="$SRC/$base_name"
+
+  if [[ -d "$dir" ]]; then
+    echo "rm -rf $dir"
+    rm -rf "$dir"
+  fi
+
+  i=$((i + 3))
+done
-echo "✅ Cleared source expansions: $LINUX_SRC, $BINUTILS_SRC, $GCC_SRC, $GLIBC_SRC"
+echo "✅ clean_source_expansion.sh"
--- /dev/null
+#!/bin/bash
+set -euo pipefail
+
+source "$(dirname "$0")/environment.sh"
+
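+# SOURCE_DIR_LIST is expected to be defined in environment.sh and to list the expanded source trees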
+for dir in "${SOURCE_DIR_LIST[@]}"; do
+ if [[ -d "$dir" ]]; then
+ echo "rm -rf $dir"
+ rm -rf "$dir"
+ fi
+done
+
+echo "✅ clean_source_expansion.sh"
--- /dev/null
+#!/bin/bash
+set -euo pipefail
+
+source "$(dirname "$0")/environment.sh"
+
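+# Removes the downloaded tarballs from $UPSTREAM and the cloned Git repositories (which go directly into $SRC)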
+# Remove tarballs
+i=0
+while [ $i -lt ${#UPSTREAM_TARBALL_LIST[@]} ]; do
+ tarball="${UPSTREAM_TARBALL_LIST[$i]}"
+ path="$UPSTREAM/$tarball"
+
+ if [[ -f "$path" ]]; then
+ echo "rm $path"
+ rm "$path"
+ fi
+
+ i=$((i + 3))
+done
+
+# Remove Git repositories
+i=0
+while [ $i -lt ${#UPSTREAM_GIT_REPO_LIST[@]} ]; do
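+  # entries are (URL, branch, destination) triples; only the destination is needed here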
+ dir="${UPSTREAM_GIT_REPO_LIST[$((i+2))]}"
+
+ if [[ -d "$dir" ]]; then
+ echo "rm -rf $dir"
+ rm -rf "$dir"
+ fi
+
+ i=$((i + 3))
+done
+
+echo "✅ clean_upstream.sh"
+++ /dev/null
-#!/bin/bash
-set -euo pipefail
-
-source "$(dirname "$0")/environment.sh"
-
-# 🛠️ Function to check if the internet is accessible
-check_internet_connection() {
- echo "🌐 Checking internet connection..."
- if curl -s --head http://google.com | head -n 1 | grep "HTTP/1.1 200 OK" > /dev/null; then
- echo "✅ Internet is reachable."
- else
- echo "❌ No internet connection detected. Proceeding with caution..."
- fi
-}
-
-# 🏰 Function to check if the server for the URL is reachable
-check_server_reachability() {
- local url=$1
- echo "🌍 Checking if the server $url is reachable..."
- if curl -s --head "$url" | head -n 1 | grep "HTTP/1.1 200 OK" > /dev/null; then
- echo "✅ Server $url is reachable."
- else
- echo "❌ Cannot reach server $url. Proceeding with download attempt anyway..."
- fi
-}
-
-# 🏰 Function to check if file already exists in UPSTREAM
-check_file_exists() {
- local file=$1
- if [[ -f "$UPSTREAM/$file" ]]; then
- echo "⚡ $file already exists in $UPSTREAM, skipping download."
- return 0 # File exists, so skip download
- else
- return 1 # File doesn't exist, needs to be downloaded
- fi
-}
-
-# 🛡️ Function to download a file from a URL
-download_file() {
- local file=$1
- local url=$2
-
- echo "📥 Downloading $file..."
- curl -LO "$url"
-
- if [[ -f "$file" ]]; then
- echo "✅ Successfully downloaded $file."
- else
- echo "❌ Error downloading $file. Continuing with next source."
- fi
-}
-
-# 🌍 Main Function to download all sources
-fetch_sources() {
- echo "🧙♂️ Fetching legendary sources for the build..."
-
- # Check for internet connection first
- check_internet_connection
-
- # Define source list (version-controlled)
- sources=(
- "$LINUX_TARBALL:$LINUX_URL"
- "$BINUTILS_TARBALL:$BINUTILS_URL"
- "$GCC_TARBALL:$GCC_REPO" # Special case for Git repo
- "$GLIBC_TARBALL:$GLIBC_URL"
- )
-
- for source in "${sources[@]}"; do
- IFS=":" read -r tarball url <<< "$source"
-
- if check_file_exists "$tarball"; then
- continue # Skip if file already exists
- fi
-
- # Check if we can reach the server before attempting download
- check_server_reachability "$url"
-
- # Special case for Git-based source (GCC)
- if [[ "$tarball" == *"gcc"* ]]; then
- echo "⚡ Fetching GCC source from Git repo: $GCC_REPO"
- git clone --branch "$GCC_BRANCH" "$GCC_REPO" "$UPSTREAM/gcc-$GCC_VER"
- echo "✅ GCC source fetched from Git."
- else
- download_file "$tarball" "$url"
- fi
- done
-
- echo "🛠️ All sources fetched and ready for build!"
-}
-
-# Start the fetching process
-fetch_sources
--- /dev/null
+#!/bin/bash
+# This script can be re-run to fetch whatever was missed on previous invocations.
+# If a tarball is corrupt, delete it and run this script again.
+# The connectivity checks are advisory: downloads are attempted even when they fail.
+
+set -uo pipefail # no `-e`, we want to continue on error
+
+source "$(dirname "$0")/environment.sh"
+
+check_internet_connection() {
+ if ! curl -s --connect-timeout 5 https://example.com > /dev/null; then
+ echo "⚠️ No internet connection detected"
+ fi
+}
+
+check_server_reachability() {
+  local url=$1
+  # -f fails on HTTP errors, -I sends a HEAD request, -L follows redirects
+  # (matching "HTTP/1.1 200 OK" misses HTTP/2 responses and redirects)
+  if ! curl -fsIL --connect-timeout 5 "$url" -o /dev/null; then
+    echo "⚠️ Cannot reach $url"
+  fi
+}
+
+check_file_exists() {
+ local file=$1
+ [[ -f "$UPSTREAM/$file" ]]
+}
+
+download_file() {
+ local file=$1
+ local url=$2
+
+ echo "curl -LO $url"
+ if (cd "$UPSTREAM" && curl -LO "$url"); then
+ if [[ -f "$UPSTREAM/$file" ]]; then
+ return 0
+ else
+ echo "❌ $file did not appear after download"
+ return 1
+ fi
+ else
+ echo "❌ Failed to download $file"
+ return 1
+ fi
+}
+
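+# UPSTREAM_TARBALL_LIST holds (tarball, URL, destination) triples, hence the stride of 3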
+fetch_tarballs() {
+ i=0
+ while [ $i -lt ${#UPSTREAM_TARBALL_LIST[@]} ]; do
+ tarball="${UPSTREAM_TARBALL_LIST[$i]}"
+ url="${UPSTREAM_TARBALL_LIST[$((i+1))]}"
+
+ if check_file_exists "$tarball"; then
+ i=$((i + 3))
+ continue
+ fi
+
+ check_server_reachability "$url"
+
+ if ! download_file "$tarball" "$url"; then
+ echo "⚠️ Skipping $tarball due to previous error"
+ fi
+
+ i=$((i + 3))
+ done
+}
+
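+# UPSTREAM_GIT_REPO_LIST holds (URL, branch, destination) triples, hence the stride of 3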
+fetch_git_repos() {
+ i=0
+ while [ $i -lt ${#UPSTREAM_GIT_REPO_LIST[@]} ]; do
+ repo="${UPSTREAM_GIT_REPO_LIST[$i]}"
+ branch="${UPSTREAM_GIT_REPO_LIST[$((i+1))]}"
+ dir="${UPSTREAM_GIT_REPO_LIST[$((i+2))]}"
+
+ if [[ -d "$dir/.git" ]]; then
+ i=$((i + 3))
+ continue
+ fi
+
+ echo "git clone --branch $branch $repo $dir"
+ if ! git clone --branch "$branch" "$repo" "$dir"; then
+ echo "❌ Failed to clone $repo → $dir"
+ fi
+
+ i=$((i + 3))
+ done
+}
+
+check_internet_connection
+fetch_tarballs
+fetch_git_repos
+
+echo "✅ download_expand_source.sh"
export BUILD_DIR="$ROOT/build"
export LOGDIR="$ROOT/log"
export UPSTREAM="$ROOT/upstream"
+ export SRC="$ROOT/source"
# Synthesized directory lists
PROJECT_DIR_LIST=(
"$LOGDIR"
"$SYSROOT" "$TOOLCHAIN" "$BUILD_DIR"
- "$UPSTREAM"
+ "$UPSTREAM" "$SRC"
)
# list these in the order they can be deleted
PROJECT_SUBDIR_LIST=(
)
# Source directories
- export SRC=$ROOT/source
export LINUX_SRC="$SRC/linux-$LINUX_VER"
export BINUTILS_SRC="$SRC/binutils-$BINUTILS_VER"
export GCC_SRC="$SRC/gcc-$GCC_VER"
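+ # Tarball Info: File Name, Download URL, Destination Directory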
export UPSTREAM_TARBALL_LIST=(
"$LINUX_TARBALL"
"https://cdn.kernel.org/pub/linux/kernel/v6.x/$LINUX_TARBALL"
- "$ROOT/linux-$LINUX_VER"
+ "$UPSTREAM/linux-$LINUX_VER"
"$BINUTILS_TARBALL"
"https://ftp.gnu.org/gnu/binutils/$BINUTILS_TARBALL"
- "$ROOT/binutils-$BINUTILS_VER"
+ "$UPSTREAM/binutils-$BINUTILS_VER"
"$GLIBC_TARBALL"
"https://ftp.gnu.org/gnu/libc/$GLIBC_TARBALL"
- "$ROOT/glibc-$GLIBC_VER"
+ "$UPSTREAM/glibc-$GLIBC_VER"
)
# Git Repositories (URL, Branch, Destination Directory)
export GCC_BRANCH="releases/gcc-15"
# Git Repo Info: Repository URL, Branch, Destination Directory
+ # Repos are already expanded when cloned, so they go directly into $SRC
export UPSTREAM_GIT_REPO_LIST=(
"$GCC_REPO"
"$GCC_BRANCH"
- "$ROOT/gcc-$GCC_VER"
+ "$SRC/gcc-$GCC_VER"
#currently there is no second repo
)
--- /dev/null
+#!/bin/bash
+set -euo pipefail
+
+source "$(dirname "$0")/environment.sh"
+
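+# Expands each downloaded tarball from $UPSTREAM into $SRC, skipping anything already extracted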
+i=0
+while [ $i -lt ${#UPSTREAM_TARBALL_LIST[@]} ]; do
+ tarball="${UPSTREAM_TARBALL_LIST[$i]}"
+ # url is unused
+ src_path="$UPSTREAM/$tarball"
+
+ # Strip compression suffix to guess subdirectory name
+ base_name="${tarball%.tar.*}"
+ target_dir="$SRC/$base_name"
+
+ if [[ -d "$target_dir" ]]; then
+ echo "⚠️ $target_dir already exists, skipping"
+ i=$((i + 3))
+ continue
+ fi
+
+ if [[ ! -f "$src_path" ]]; then
+ echo "❌ Missing tarball: $src_path"
+ i=$((i + 3))
+ continue
+ fi
+
+ echo "tar -xf $tarball → $SRC"
+ (
+ cd "$SRC"
+ tar -xf "$src_path"
+ )
+
+ if [[ -d "$target_dir" ]]; then
+ echo "✅ Extracted to $target_dir"
+ else
+ echo "❌ Expected $target_dir not found after extraction"
+ fi
+
+ i=$((i + 3))
+done
+
+echo "✅ extract_from_tar.sh"