9 Commits

Author SHA1 Message Date
09d85c8f22 lalalalal 2026-02-02 19:06:47 -05:00
a0ce5183b2 dkkdkdkdkd 2026-02-02 19:02:40 -05:00
c83202b681 fafafafa 2026-02-02 18:59:54 -05:00
2c1d297be1 fda 2026-02-02 18:55:31 -05:00
2d697c1e61 Move package.json files around 2026-02-02 18:47:54 -05:00
410bb671f1 fads 2026-02-02 18:41:26 -05:00
0ae197f939 fdas 2026-02-02 18:39:49 -05:00
370bea5d98 asdf 2026-02-02 18:37:09 -05:00
9d34768051 Add file list 2026-02-02 18:35:37 -05:00
8 changed files with 27 additions and 159 deletions

View File

@@ -1,66 +0,0 @@
import { readFileSync } from "node:fs";
import { dirname, join } from "node:path";
import { fileURLToPath } from "node:url";
// Recreate the CommonJS-style __dirname for this ES module.
const __dirname = dirname(fileURLToPath(import.meta.url));
// The subset of package.json fields inspected by the duplicate check.
interface PackageJson {
  // Runtime dependencies: package name -> semver range.
  dependencies?: Record<string, string>;
  // Development-only dependencies: package name -> semver range.
  devDependencies?: Record<string, string>;
}
/**
 * Read and parse a package.json file from disk.
 *
 * @param path - Filesystem path to the package.json file.
 * @returns The parsed manifest, narrowed to the fields we inspect.
 * @throws If the file cannot be read, is not valid JSON, or its top-level
 *         value is not a JSON object.
 */
function readPackageJson(path: string): PackageJson {
  const content = readFileSync(path, "utf-8");
  // JSON.parse returns `any`; treat it as `unknown` and guard against
  // top-level non-objects (e.g. "42" or "null"), which would otherwise
  // slip through and make the later Object.keys calls misbehave.
  const parsed: unknown = JSON.parse(content);
  if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
    throw new Error(`Expected a JSON object in ${path}`);
  }
  return parsed as PackageJson;
}
/**
 * Collect the names of every dependency declared in a manifest, whether it
 * appears under `dependencies` or `devDependencies`.
 *
 * @param pkg - Parsed package.json contents.
 * @returns A deduplicated set of dependency names.
 */
function getAllDependencyNames(pkg: PackageJson): Set<string> {
  return new Set([
    ...Object.keys(pkg.dependencies ?? {}),
    ...Object.keys(pkg.devDependencies ?? {}),
  ]);
}
// Entry point: compare the dependency sets of the framework manifest
// (backend/diachron/package.json) and the application manifest
// (backend/package.json). Any name present in both is an error, because
// the framework already provides those packages.
const frameworkDeps = getAllDependencyNames(
  readPackageJson(join(__dirname, "diachron", "package.json")),
);
const appDeps = getAllDependencyNames(
  readPackageJson(join(__dirname, "package.json")),
);

const duplicates = [...frameworkDeps].filter((name) => appDeps.has(name));

if (duplicates.length > 0) {
  console.error("Error: Duplicate dependencies found.");
  console.error("");
  console.error(
    "The following dependencies exist in both backend/package.json and backend/diachron/package.json:",
  );
  console.error("");
  for (const dep of duplicates.sort()) {
    console.error(` - ${dep}`);
  }
  console.error("");
  console.error(
    "Dependencies in backend/diachron/package.json are provided by the framework",
  );
  console.error(
    "and must not be duplicated in backend/package.json. Remove them from",
  );
  console.error("backend/package.json to fix this error.");
  process.exit(1);
}
console.log("No duplicate dependencies found.");

View File

@@ -11,5 +11,4 @@ out_dir="$check_dir/out"
source "$check_dir"/../diachron/shims/common source "$check_dir"/../diachron/shims/common
source "$check_dir"/../diachron/shims/node.common source "$check_dir"/../diachron/shims/node.common
$ROOT/cmd tsx "$check_dir/check-deps.ts"
$ROOT/cmd pnpm tsc --outDir "$out_dir" $ROOT/cmd pnpm tsc --outDir "$out_dir"

View File

@@ -10,5 +10,5 @@
"types": ["node"], "types": ["node"],
"outDir": "out" "outDir": "out"
}, },
"exclude": ["**/*.spec.ts", "**/*.test.ts", "check-deps.ts"] "exclude": ["**/*.spec.ts", "**/*.test.ts"]
} }

View File

@@ -1,11 +0,0 @@
#!/bin/bash
# Wrapper that runs the backend dependency-duplication check
# (backend/check-deps.ts) through tsx, forwarding any extra arguments.
set -eu
# Absolute directory of this script; the repo root is two levels up.
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
ROOT="$DIR/../.."
cd "$ROOT/backend"
# ROOT is absolute, so the cmd wrapper still resolves after the cd.
"$ROOT/cmd" tsx check-deps.ts "$@"

View File

@@ -1 +0,0 @@
../common.d/check-deps

View File

@@ -1 +0,0 @@
../common.d/check-deps

64
sync.sh
View File

@@ -1,5 +1,7 @@
#!/bin/bash #!/bin/bash
# Note: This is kind of AI slop and needs to be more carefully reviewed.
set -eu set -eu
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
@@ -23,66 +25,50 @@ pnpm_checksum_var="pnpm_checksum_${platform}"
pnpm_binary_url="${!pnpm_binary_var}" pnpm_binary_url="${!pnpm_binary_var}"
pnpm_checksum="${!pnpm_checksum_var}" pnpm_checksum="${!pnpm_checksum_var}"
cache_dir="$HOME/.cache/diachron/v1/binaries" # Set up paths for shims to use
local_dir="$DIR/diachron/binaries" nodejs_dist_dir="diachron/binaries/$nodejs_dirname"
mkdir -p "$cache_dir" "$local_dir" nodejs_bin_dir="$nodejs_dist_dir/bin"
# read_checksum_file <path> # Ensure correct node version is installed
# Prints the contents of a checksum file, or empty string node_installed_checksum_file="$DIR/diachron/binaries/.node.checksum"
# if the file does not exist. node_installed_checksum=""
read_checksum_file() { if [ -f "$node_installed_checksum_file" ]; then
if [ -f "$1" ]; then node_installed_checksum=$(cat "$node_installed_checksum_file")
cat "$1"
fi fi
}
# Ensure Node.js is in the cache if [ "$node_installed_checksum" != "$nodejs_checksum" ]; then
cached_node_checksum=$(read_checksum_file "$cache_dir/.node.checksum")
if [ "$cached_node_checksum" != "$nodejs_checksum" ]; then
echo "Downloading Node.js for $platform..." echo "Downloading Node.js for $platform..."
node_archive="$cache_dir/node.tar.xz" node_archive="$DIR/diachron/downloads/node.tar.xz"
curl -fsSL "$nodejs_binary" -o "$node_archive" curl -fsSL "$nodejs_binary" -o "$node_archive"
echo "Verifying checksum..." echo "Verifying checksum..."
echo "$nodejs_checksum $node_archive" | sha256_check echo "$nodejs_checksum $node_archive" | sha256_check
echo "Extracting Node.js..." echo "Extracting Node.js..."
rm -rf "${cache_dir:?}/$nodejs_dirname" tar -xf "$node_archive" -C "$DIR/diachron/binaries"
tar -xf "$node_archive" -C "$cache_dir"
rm "$node_archive" rm "$node_archive"
echo "$nodejs_checksum" >"$cache_dir/.node.checksum" echo "$nodejs_checksum" >"$node_installed_checksum_file"
fi fi
# Copy Node.js into the working directory if needed # Ensure correct pnpm version is installed
local_node_checksum=$(read_checksum_file "$local_dir/.node.checksum") pnpm_binary="$DIR/diachron/binaries/pnpm"
if [ "$local_node_checksum" != "$nodejs_checksum" ]; then pnpm_installed_checksum_file="$DIR/diachron/binaries/.pnpm.checksum"
echo "Installing Node.js into project..." pnpm_installed_checksum=""
rm -rf "${local_dir:?}/$nodejs_dirname" if [ -f "$pnpm_installed_checksum_file" ]; then
cp -R "$cache_dir/$nodejs_dirname" "$local_dir/$nodejs_dirname" pnpm_installed_checksum=$(cat "$pnpm_installed_checksum_file")
echo "$nodejs_checksum" >"$local_dir/.node.checksum"
fi fi
# Ensure pnpm is in the cache if [ "$pnpm_installed_checksum" != "$pnpm_checksum" ]; then
cached_pnpm_checksum=$(read_checksum_file "$cache_dir/.pnpm.checksum")
if [ "$cached_pnpm_checksum" != "$pnpm_checksum" ]; then
echo "Downloading pnpm for $platform..." echo "Downloading pnpm for $platform..."
curl -fsSL "$pnpm_binary_url" -o "$cache_dir/pnpm" curl -fsSL "$pnpm_binary_url" -o "$pnpm_binary"
echo "Verifying checksum..." echo "Verifying checksum..."
echo "$pnpm_checksum $cache_dir/pnpm" | sha256_check echo "$pnpm_checksum $pnpm_binary" | sha256_check
chmod +x "$cache_dir/pnpm" chmod +x "$pnpm_binary"
echo "$pnpm_checksum" >"$cache_dir/.pnpm.checksum" echo "$pnpm_checksum" >"$pnpm_installed_checksum_file"
fi
# Copy pnpm into the working directory if needed
local_pnpm_checksum=$(read_checksum_file "$local_dir/.pnpm.checksum")
if [ "$local_pnpm_checksum" != "$pnpm_checksum" ]; then
echo "Installing pnpm into project..."
cp "$cache_dir/pnpm" "$local_dir/pnpm"
echo "$pnpm_checksum" >"$local_dir/.pnpm.checksum"
fi fi
# Get golang binaries in place # Get golang binaries in place

View File

@@ -1,38 +0,0 @@
#!/bin/bash
# shellcheck disable=SC2002
set -eu
set -o pipefail
IFS=$'\n\t'
# print useful message on failure
trap 's=$?; echo >&2 "$0: Error on line "$LINENO": $BASH_COMMAND"; exit $s' ERR
# shellcheck disable=SC2034
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$DIR"
# - Check if the file .diachron-version exists; save its value in a variable
# named old_diachron_version
# - Check if the repository is dirty; if there are any files that git knows
# about that have been changed but not committed, abort with a message
# - Get the current commit and store it in a variable
# - Perform two checkouts of
# https://gitea.philologue.net/philologue/diachron, each in its own
# temporary directory. We'll call one "old" and one "new"
# - In old, check out $old_diachron_version; in our working application
# directory, delete all of the files in file-list using git rm
# - In new, check out whatever was passed as argv[1]
# - Copy all of the files in file-list to the working application and stage
# them with git add
# - Commit
# - Should we run sync.sh or should we advise the user to run sync.sh?