version: '{build}.{branch}'
skip_branch_with_pr: true
clone_folder: c:\deno
clone_depth: 1
environment:
  APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
  DENO_BUILD_MODE: release
  DENO_BUILD_PATH: $(APPVEYOR_BUILD_FOLDER)\out\release
  DENO_THIRD_PARTY_PATH: $(APPVEYOR_BUILD_FOLDER)\third_party
  MTIME_CACHE_DB: $(APPVEYOR_BUILD_FOLDER)\mtime_cache.xml
  CARGO_HOME: $(USERPROFILE)\.cargo
  RUSTUP_HOME: $(USERPROFILE)\.rustup
  RELEASE_ARTIFACT: deno_win_x64.zip

  # Appveyor uses 7zip to pack cache directories. We use these options:
  #   -t7z  : Use '7z' format.
  #   -snl  : Store symlinks; doesn't work, but it prevents following symlinks.
  #   -mtc  : Use UTC timestamps. This is required for incremental builds.
  #   -mx=1 : Fast compression.
  APPVEYOR_CACHE_ENTRY_ZIP_ARGS: -t7z -snl -mtc -mx=1

  # Define some PowerShell helper functions which are used in the scripts below.
  # They're defined in an environment variable to reduce noise in the build log.
  PS_UTILS: |-
      # `Exec` runs a regular executable. It looks at the process' exit code,
      # rather than its stderr output, to tell if a command has failed.
      function Exec([ScriptBlock] $Command, [switch] $NoNewLines) {
        "$Command".TrimStart(" &") | Write-Host               # Echo command.
        & $Command 2>&1 | Write-Host -NoNewLine:$NoNewLines   # Execute command.
        if ($NoNewLines) { Write-Host }                       # Write newline.
        if ($LastExitCode -ne 0) { throw "Failure. Exit code: $LastExitCode" }
      }
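      # For example, steps later in this file run commands through it as
      # `Exec { & git submodule update --init --force --depth 1 }` and
      # `Exec { & python tools\test.py $env:DENO_BUILD_PATH }`.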

      # Get-Tree lists all objects in a tree. It's different from Get-ChildItem
      # in that the latter recurses through symlinks, which is problematic.
      function Get-Tree([string[]] $Path, [switch] $Recurse, [switch] $Force) {
        function Get-SubDirs([string[]] $Path) {
          Get-ChildItem $Path -Force:$Force `
                        -Attributes Directory+!ReparsePoint |
            foreach { $_.FullName } |
            foreach { $_; Get-SubDirs $_ }
        }
        if ($Recurse) { $Path += Get-SubDirs $Path }
        Get-ChildItem $Path -Force:$Force @args
      }
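      # For example, the absolute-path check at the end of this file calls
      # `Get-Tree $dir -File -Force -Recurse` to list files without following
      # reparse points.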

      # `Delete-Tree` is a simple wrapper around Remove-Item. It doesn't set
      # an error status if one of the paths to be deleted doesn't exist.
      function Delete-Tree([string[]] $Path) {
        $Path | foreach {
          "Deleting '$_'" | Write-Host -NoNewLine
          if (Test-Path $_) {
            Remove-Item $_ -Recurse -Force -ErrorAction Ignore
            $(if ($?) { " - ok" } else { " - failed" }) | Write-Host
          } else {
            " - not found" | Write-Host
          }
        }
      }
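      # For example, the Rust update step below recovers from a failed
      # `rustup update` with `Delete-Tree $env:CARGO_HOME, $env:RUSTUP_HOME`.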

      # We set file atimes to this date and see if it changes.
      $FILE_NOT_NEEDED = Get-Date -Date "1984-04-11T00:00:00Z"  # A good year.
      # Traced files are stored in a hash table, using their full path as key.
      $not_needed = @{}
      # Whether filesystem last access time tracking has been enabled yet.
      $atime_enabled = $false

      # Trace whether files are actually used, so we can find and remove files
      # that are unnecessary. We use this to avoid hoarding stale build outputs.
      function Start-TraceFilesNeeded([string[]] $Path, [switch] $Recurse) {
        # Don't enable if the cache won't be saved.
        if (-not (Get-SaveCache)) { return }
        # Identify (new) files to trace. A non-existing path is not an error.
        $files = $Path |
          where { Test-Path $_ } |
          foreach { Get-Tree $_ -Recurse:$Recurse -File } |
          where { -not $not_needed.ContainsKey($_.FullName) }
        # Set newly traced files' last access time to very long ago.
        $files | foreach { $_.LastAccessTime = $FILE_NOT_NEEDED }
        # Add newly traced files to the hash table of unnecessary files.
        $files | foreach { $not_needed.Add($_.FullName, $true) }
        # Enable last access time tracking only if any files were found.
        if ($files -and -not $atime_enabled) {
          Exec { fsutil behavior set DisableLastAccess 0 }
          Set-Variable -Name atime_enabled -Value $true -Scope 1
        }
        # Log statistics.
        Write-Host "Tracing file access for $($files.Count) files."
      }
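      # Any traced file whose access time still equals $FILE_NOT_NEEDED when
      # Stop-TraceFilesNeeded runs was never read during the build, so it can
      # be deleted before the build directory is cached.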

      # Marks files as needed.
      #   -Auto          : Auto mark files if their access time has changed.
      #   -Path <path[]> : Explicitly mark file(s) as needed.
      #   -Recurse       : Recurse into directories specified with -Path.
      #   -Reason <text> : Optional reason, written to the build log.
      function Set-FilesNeeded([switch] $Auto, [string[]] $Path,
                               [switch] $Recurse, [string] $Reason) {
        # Helper function.
        function Mark([System.IO.FileSystemInfo[]] $Files, [string] $How) {
          # Find matching files that are traced, then drop them from the
          # 'not needed' table.
          $keys = $Files.FullName |
            where { $_ -and $not_needed.ContainsKey($_) }
          $keys | foreach { $not_needed.Remove($_) }
          # Write log message.
          if ($keys.Count -gt 0) {
            Write-Host ("$Reason$(if ($Reason) { ': ' })" +
                        "$($keys.Count) files $How marked 'needed'.")
          }
        }
        # Skip marking step if there are no files being traced.
        if ($not_needed.Count -eq 0) { return }
        # Auto mark files 'needed' when their last access time has changed.
        if ($Auto) {
          $files = $not_needed.Keys |
            where { Test-Path $_ -PathType Leaf } |
            foreach { Get-Item $_ -Force } |
            where { $_.LastAccessTime -ne $FILE_NOT_NEEDED }
          Mark -Files $files -How "automatically"
        }
        # Mark explicitly specified paths.
        if ($Path) {
          $files = $Path |
            where { Test-Path $_ } |
            foreach { Get-Tree $_ -Recurse:$Recurse -Force -File }
          Mark -Files $files -How "explicitly"
        }
      }
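      # The build steps below call this as e.g.
      # `Set-FilesNeeded -Auto -Reason "Build finished"` after each phase.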

      # Clean up stale files and end file tracking.
      function Stop-TraceFilesNeeded {
        # Look for files that had their atime changed, and mark them needed.
        Set-FilesNeeded -Auto
        # Make a list of all files to delete.
        $files = $not_needed.Keys |
          where { Test-Path $_ -PathType Leaf } |
          foreach { Get-Item $_ -Force }
        # Compute the total size of all to-be-deleted files.
        $size_info = $files | measure -Property Length -Sum
        $size_mb = "{0:N1}" -f ($size_info.Sum / (1024 * 1024))
        # Delete files, as well as parent directories if they became empty.
        $files | Remove-Item -Force
        $dirs = $files | foreach {
          try { while ($_ = $_.Directory) { $_.Delete(); $_ } } catch {}
        }
        # All unnecessary files are now gone.
        $not_needed.Clear()
        # Summarize what was cleaned up in the build log.
        if ($files.Count -gt 0) {
          Write-Host "Deleted $($files.Count) unnecessary files and",
                     "$($dirs.Count) directories ($size_mb MB)."
        }
      }
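
      # Git doesn't preserve file mtimes, so they are kept in a separate cache
      # database; restoring them here keeps ninja's incremental builds from
      # rebuilding everything (see the `install` step below).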
      function Sync-MTimeCache([string] $DatabasePath) {
        # Load the previously saved cache, if it exists.
        try {
          $old_cache = Import-CliXml -Path $DatabasePath -ErrorAction Stop
        } catch {
          $old_cache = @{}
        }
        # The updated cache gets populated while restoring the old one.
        $new_cache = @{}
        # Determine the mtime that will be assigned to new and modified files.
        # To retain nanosecond precision when (de)serializing, mtimes are stored
        # as 64-bit 'FileTime' integers, not as DateTime objects.
        $now = (Get-Date).ToFileTimeUtc()
        # Since we're going to rely on git to give us file SHAs, double check
        # that the work dir is clean and the index is up to date.
        $dirty = git status -z --ignore-submodules=all --untracked-files=no
        if ($dirty) { throw "Work tree dirty." }
        # Ask git for a list of checked-out files with their hashes and metadata.
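        # (Illustrative: each entry starts with mode, hash and stage, then a
        # tab, end-of-line/attribute info, another tab, and the file path.)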
        (git ls-files -z --stage --eol --full-name) -split "\0" | foreach {
          # Skip non-files (symlinks etc.). File mode should be 100644/100755.
          if ($_ -notmatch "^100") { return }
          # Look up mtime in cache. Reset to 'now' if not found or invalid.
          # The entire "mode hash attr filename" line serves as the cache key.
          $mtime = $old_cache[$_]
          if (-not $mtime -or $mtime -gt $now) { $mtime = $now }
          # Change the file's LastWriteTime to the mtime found in the cache.
          $path = ($_ -split "\t", 3)[2]  # Filename starts after 2nd tab.
          $lwt = [DateTime]::FromFileTimeUtc($mtime)
          Set-ItemProperty -Path $path -Name LastWriteTime -Value $lwt -Force
          # Add entry to updated cache.
          $new_cache[$_] = $mtime
        }
        # Write the updated cache back to disk.
        if (Get-SaveCache) {
          $new_cache | Export-CliXml -Path $DatabasePath -ErrorAction Stop
        }
        # Log some statistics to get an idea if this is all working.
        $rows = [ordered]@{ Valid = "=="; New = "=>"
                            Stale = "<="; Total = "*" }
        $keys = @{ old = @($old_cache.Keys); new = @($new_cache.Keys) }
        $diff = Compare-Object $keys.old $keys.new -IncludeEqual
        $rows.GetEnumerator() | foreach {
          $keyset = ($diff | where SideIndicator -like $_.Value).InputObject
          New-Object -TypeName PSObject -Property ([ordered]@{
            "Status"      = $_.Name
            "Old Cache #" = ($keyset | where { $_ -in $keys.old }).Count
            "New Cache #" = ($keyset | where { $_ -in $keys.new }).Count
          })
        } | Format-Table | Out-String -Stream | where { $_ }
      }

      # Get-SaveCache returns $true if the cache will be saved at the end.
      function Get-SaveCache {
        -not $env:APPVEYOR_PULL_REQUEST_NUMBER -and
        -not ($env:APPVEYOR_CACHE_SKIP_SAVE -eq "true")
      }
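      # Get-SaveCache gates the steps that only matter when the cache will be
      # uploaded, e.g. the `git gc` and `rustup update` steps under `install`.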

for:
  # Do not save the build cache for feature branches. TODO: Once we have
  # multiple permanent branches, use a build matrix so each branch has its
  # own cache.
  - branches:
      except:
        - master
    environment:
      APPVEYOR_CACHE_SKIP_SAVE: true

cache:
  # Python packages installed with `pip --user`.
  - $(APPDATA)\Python
  # Rust stuff.
  - $(CARGO_HOME)
  - $(RUSTUP_HOME)
  # Cache the third_party submodule to preserve binaries downloaded by setup.py,
  # and to make incremental builds work.
  - $(APPVEYOR_BUILD_FOLDER)\.git\modules\third_party
  - $(APPVEYOR_BUILD_FOLDER)\third_party
  # Cache file mtimes in the main git repo, also to enable incremental builds.
  - $(MTIME_CACHE_DB)
  # Build incrementally.
  - $(DENO_BUILD_PATH)

init:
  # Load utility functions.
  - ps: Invoke-Expression $env:PS_UTILS
  # Make git check out symlinks (not placeholder text files).
  - git config --global core.symlinks true

install:
  # Clone the third_party submodule.
  - ps: |-
      try {
        Exec { & git submodule update --init --force --depth 1 }
      } catch {
        # Git will fail if the `third_party` directory was restored from cache,
        # but the `.git/modules` directory wasn't. Rebuild it from scratch.
        Delete-Tree $env:DENO_THIRD_PARTY_PATH
        Exec -NoNewLines { & git submodule update --init --force --depth 1 }
      }

  # Prune and pack git objects. Thus when we upload `.git/modules/` to the
  # Appveyor cache, it'll include only objects that were actually needed.
  # This step is skipped if the cache is not going to be saved.
  - ps: if (Get-SaveCache) { git -C $env:DENO_THIRD_PARTY_PATH gc --prune=all }

  # Git doesn't store file mtimes, so those are stored separately in the cache.
  # Without this cache, ninja would assume all files are dirty and rebuild
  # everything.
  - ps: Sync-MTimeCache -DatabasePath $env:MTIME_CACHE_DB

  # Configure depot_tools and add it to the search path. This is necessary
  # because, later in this script, we need to invoke ninja directly.
  - ps: |-
      $env:PATH = "$env:DENO_THIRD_PARTY_PATH\depot_tools;$env:PATH"
      $env:DEPOT_TOOLS_WIN_TOOLCHAIN = "0"

  # Install a recent Node.js version.
  - ps: Install-Product -Product node -Version 10 -Platform x64

  # Make sure the right Python version is in PATH, and others are not.
  - ps: |-
      # Remove the wrong Python version(s) from PATH.
      $p = $env:PATH -split ";" | where {
        -not (Test-Path "$_\python.exe") -and
        -not (Test-Path "$_\pip.exe")
      }
      # Add binary dir for `pip --user` packages.
      $p += "$env:APPDATA\Python\Scripts"
      # Add python27-x64.
      $p += "c:\Python27-x64"
      $p += "c:\Python27-x64\Scripts"
      $env:PATH = $p -join ";"

  # Pip on Appveyor is too old. Install a recent version in our user dir.
  - python -m pip install --upgrade --user --no-cache-dir pip

  # Install Python packages.
  - pip install --upgrade --user --no-cache-dir pywin32 yapf

  # Add Rust/Cargo to PATH.
  - ps: $env:PATH += ";$env:CARGO_HOME\bin"

  # Look for Rust updates.
  # * If there are no updates, rustup will exit cleanly.
  # * If there are updates, rustup will attempt to install them, and then blow
  #   up because we removed the 'rust-docs' component.
  # * The actual update is done by removing and reinstalling with rustup-init.
  - ps: |-
      if ((Get-SaveCache) -and (Test-Path $env:CARGO_HOME)) {
        try {
          Exec -NoNewLines { & rustup update stable-x86_64-pc-windows-msvc }
        } catch {
          Delete-Tree $env:CARGO_HOME, $env:RUSTUP_HOME
        }
      }

  # Install or reinstall Rust via rustup-init.
  # * After install/update, the rustup directory is very big, with many files,
  #   slowing down cache save/restore a lot, so we remove unnecessary stuff.
  # * TODO: Use `rustup component remove docs` instead, when this issue
  #   is resolved: https://github.com/rust-lang-nursery/rustup.rs/issues/998.
  # * TODO: Ship Rust in the third_party repo. See issue #386.
  - ps: |-
      if (-not (Test-Path $env:CARGO_HOME)) {
        Invoke-WebRequest -Uri "https://win.rustup.rs" `
                          -OutFile "$env:TEMP\rustup-init.exe"
        Exec -NoNewLines { & "$env:TEMP\rustup-init.exe" -y }
        Delete-Tree @(
          "$env:RUSTUP_HOME\downloads",
          "$env:RUSTUP_HOME\tmp",
          "$env:RUSTUP_HOME\toolchains\stable-x86_64-pc-windows-msvc\share\doc"
        )
      }

  # Log installed Node.js version + processor architecture.
  - node -p "`Node ${process.version} ${process.arch}`"

  # Log installed Python version + processor architecture.
  - ps: |-
      @("from sys import version",
        "print 'Python', version") -join "`n" | & python -

  # Log some more versions.
  - pip --version
  - rustc --version
  - cargo --version

before_build:
  # Mark all files in the build dir 'not needed' until proven otherwise.
  # TODO: also track files in third_party that aren't checked into the repo.
  - ps: Start-TraceFilesNeeded $env:DENO_BUILD_PATH -Recurse

  # Download clang and gn, generate ninja files.
  - python tools\setup.py
  - ps: Set-FilesNeeded -Auto -Reason "Setup finished"

  # Mark files that are produced during the build, and are known to ninja, as
  # needed. We obtain this list by dry-running `ninja -t clean`.
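  # In this dry-run mode ninja is expected to print one "Remove <path>" line
  # per build output; the step below turns those into absolute paths.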
  - ps: |-
      $outputs = ninja -C $env:DENO_BUILD_PATH -n -t clean -g |
        where { $_ -match "^Remove (.*)$" } |
        foreach { "$env:DENO_BUILD_PATH\$($Matches[1])" }
      Set-FilesNeeded -Auto -Path $outputs -Reason "Build dependency graph"

build_script:
  - python tools\build.py
  - ps: Set-FilesNeeded -Auto -Reason "Build finished"

test_script:
  - python tools\lint.py
  - ps: Exec { & python tools\test.py $env:DENO_BUILD_PATH }

after_test:
  # Remove stale files and empty dirs from the build directory.
  - ps: Stop-TraceFilesNeeded

  # Verify that the build is fully up-to-date. Running ninja should be a no-op.
  # This catches erroneous file cleanup, and incorrectly set up build deps.
  - ps: |-
      $out = ninja -C $env:DENO_BUILD_PATH -n -d explain
      if ($out -notcontains "ninja: no work to do.") {
        throw "Build should be up-to-date but isn't."
      }

  # Verify that javascript and typescript files which are bundled by rollup are
  # listed explicitly in BUILD.gn. This is not an air-tight check.
  # TODO: make rollup or another bundler write a depfile.
  - ps: |-
      $ignore = "test_util.ts", "unit_tests.ts", "*_test.ts"
      Get-ChildItem "js" -File -Force -Name |
        where { $name = $_; -not ($ignore | where { $name -like $_ }) } |
        where { -not (Select-String -Pattern $_ -Path BUILD.gn `
                                    -SimpleMatch -CaseSensitive) } |
        foreach { throw "$_ should be listed in BUILD.gn but isn't." }

  # Verify that generated ninja files do not use absolute path names.
  # If they do, it makes ccache/sccache much less effective.
  - ps: |-
      $trap = "NO_ABS_PATH_PLS"
      $dir = "$env:APPVEYOR_BUILD_FOLDER\out\$trap"
      Exec { gn gen $dir | Out-Null }
      $files = Get-Tree $dir -File -Force -Recurse | where Extension -ne ".dll"
      Select-String $trap -Path $files -SimpleMatch | tee -Variable line_matches
      if ($line_matches) {
        $ctx = $line_matches.Line |
          Select-String "[^\s;,]*[\s=]*[^\s;,=]*$trap[^\s;,]*" -AllMatches |
          foreach { $_.Matches.Value -replace '\$(.)', '$1' } |
          sort -Unique
        throw @("Absolute path(s) found in build script:") + $ctx -join "`n "
      }

  # If this build is going to be deployed, build a zip file.
  - ps: |-
      if ($env:APPVEYOR_REPO_TAG -eq "true") {
        Compress-Archive -CompressionLevel Optimal -Force `
          -Path "$env:DENO_BUILD_PATH\deno.exe" `
          -DestinationPath "$env:APPVEYOR_BUILD_FOLDER\$env:RELEASE_ARTIFACT"
      }

artifacts:
  path: $(RELEASE_ARTIFACT)

deploy:
  provider: GitHub
  auth_token:
    secure: HQIIUEOtep3yRiBacZCtX8hVmgtdNvt6Hx7u9fP4Wj2ZYp+eBFP2OLf67RKVa5VZ
  on:
    APPVEYOR_REPO_NAME: denoland/deno
    APPVEYOR_REPO_TAG: true