2020-05-24 22:50:33 +02:00
|
|
|
import * as fs from 'fs'
|
2020-05-20 17:03:08 +02:00
|
|
|
import * as core from '@actions/core'
|
2020-05-21 00:31:16 +02:00
|
|
|
import * as github from '@actions/github'
|
|
|
|
import {Webhooks} from '@octokit/webhooks'
|
|
|
|
|
2020-08-30 21:18:14 +02:00
|
|
|
import {Filter, FilterResults} from './filter'
|
2020-07-11 17:17:56 +02:00
|
|
|
import {File, ChangeStatus} from './file'
|
2020-05-26 17:16:09 +02:00
|
|
|
import * as git from './git'
|
2020-08-30 21:18:14 +02:00
|
|
|
import shellEscape from './shell-escape'
|
2020-05-20 17:03:08 +02:00
|
|
|
|
2020-08-30 21:18:14 +02:00
|
|
|
// Allowed values of the 'list-files' input - how matched file lists are exported.
type ExportFormat = 'none' | 'json' | 'shell'
|
2020-07-11 23:33:11 +02:00
|
|
|
|
2020-05-20 17:03:08 +02:00
|
|
|
async function run(): Promise<void> {
|
|
|
|
try {
|
2020-07-02 22:56:14 +02:00
|
|
|
const workingDirectory = core.getInput('working-directory', {required: false})
|
|
|
|
if (workingDirectory) {
|
|
|
|
process.chdir(workingDirectory)
|
|
|
|
}
|
|
|
|
|
2020-05-26 17:16:09 +02:00
|
|
|
const token = core.getInput('token', {required: false})
|
2020-09-01 22:47:38 +02:00
|
|
|
const base = core.getInput('base', {required: false})
|
2020-05-24 22:50:33 +02:00
|
|
|
const filtersInput = core.getInput('filters', {required: true})
|
|
|
|
const filtersYaml = isPathInput(filtersInput) ? getConfigFileContent(filtersInput) : filtersInput
|
2020-08-30 21:18:14 +02:00
|
|
|
const listFiles = core.getInput('list-files', {required: false}).toLowerCase() || 'none'
|
2020-09-01 22:47:38 +02:00
|
|
|
const initialFetchDepth = parseInt(core.getInput('initial-fetch-depth', {required: false})) || 10
|
2020-08-30 21:18:14 +02:00
|
|
|
|
|
|
|
if (!isExportFormat(listFiles)) {
|
|
|
|
core.setFailed(`Input parameter 'list-files' is set to invalid value '${listFiles}'`)
|
|
|
|
return
|
|
|
|
}
|
2020-05-24 22:50:33 +02:00
|
|
|
|
|
|
|
const filter = new Filter(filtersYaml)
|
2020-09-01 22:47:38 +02:00
|
|
|
const files = await getChangedFiles(token, base, initialFetchDepth)
|
|
|
|
const results = filter.match(files)
|
|
|
|
exportResults(results, listFiles)
|
2020-05-20 17:03:08 +02:00
|
|
|
} catch (error) {
|
|
|
|
core.setFailed(error.message)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-05-24 22:50:33 +02:00
|
|
|
function isPathInput(text: string): boolean {
|
|
|
|
return !text.includes('\n')
|
|
|
|
}
|
|
|
|
|
|
|
|
function getConfigFileContent(configPath: string): string {
|
|
|
|
if (!fs.existsSync(configPath)) {
|
|
|
|
throw new Error(`Configuration file '${configPath}' not found`)
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!fs.lstatSync(configPath).isFile()) {
|
|
|
|
throw new Error(`'${configPath}' is not a file.`)
|
|
|
|
}
|
|
|
|
|
|
|
|
return fs.readFileSync(configPath, {encoding: 'utf8'})
|
|
|
|
}
|
|
|
|
|
2020-09-01 22:47:38 +02:00
|
|
|
async function getChangedFiles(token: string, base: string, initialFetchDepth: number): Promise<File[]> {
|
2020-08-13 12:44:57 -07:00
|
|
|
if (github.context.eventName === 'pull_request' || github.context.eventName === 'pull_request_target') {
|
2020-06-15 21:49:10 +02:00
|
|
|
const pr = github.context.payload.pull_request as Webhooks.WebhookPayloadPullRequestPullRequest
|
2020-09-30 00:32:49 +02:00
|
|
|
if (token) {
|
|
|
|
return await getChangedFilesFromApi(token, pr)
|
|
|
|
}
|
|
|
|
core.info('Github token is not available - changes will be detected from PRs merge commit')
|
|
|
|
return await git.getChangesInLastCommit()
|
2020-06-15 21:49:10 +02:00
|
|
|
} else {
|
2020-10-16 12:28:12 +02:00
|
|
|
return getChangedFilesFromGit(base, initialFetchDepth)
|
2020-06-15 21:49:10 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-10-16 12:28:12 +02:00
|
|
|
// Detects changed files via local git for non-PR events (e.g. push).
// Chooses an appropriate base (commit SHA, previously pushed commit, or base
// branch merge-base) and delegates to the ./git helpers accordingly.
async function getChangedFilesFromGit(base: string, initialFetchDepth: number): Promise<File[]> {
  // Default branch name from the event payload, if the payload carries one.
  const defaultRef = github.context.payload.repository?.default_branch

  // Commit the branch pointed to before this push; null for non-push events.
  const beforeSha =
    github.context.eventName === 'push' ? (github.context.payload as Webhooks.WebhookPayloadPush).before : null

  // Ref that was pushed to. If the payload lacks 'ref', the comma operator
  // first emits a warning and then evaluates to the awaited current git ref.
  const pushRef =
    git.getShortName(github.context.ref) ||
    (core.warning(`'ref' field is missing in PUSH event payload - using current branch, tag or commit SHA`),
    await git.getCurrentRef())

  // Base to compare against: explicit 'base' input wins, then default branch.
  const baseRef = git.getShortName(base) || defaultRef
  if (!baseRef) {
    throw new Error(
      "This action requires 'base' input to be configured or 'repository.default_branch' to be set in the event payload"
    )
  }

  const isBaseRefSha = git.isGitSha(baseRef)
  const isBaseSameAsPush = baseRef === pushRef

  // If base is commit SHA will do comparison against the referenced commit
  // Or If base references same branch it was pushed to, we will do comparison against the previously pushed commit
  if (isBaseRefSha || isBaseSameAsPush) {
    // Without a 'before' SHA there is nothing to diff against - best effort
    // fallback to the last commit only.
    if (!isBaseRefSha && !beforeSha) {
      core.warning(`'before' field is missing in PUSH event payload - changes will be detected from last commit`)
      return await git.getChangesInLastCommit()
    }

    const baseSha = isBaseRefSha ? baseRef : beforeSha
    // If there is no previously pushed commit,
    // we will do comparison against the default branch or return all as added
    if (baseSha === git.NULL_SHA) {
      if (defaultRef && baseRef !== defaultRef) {
        core.info(`First push of a branch detected - changes will be detected against the default branch ${defaultRef}`)
        return await git.getChangesSinceMergeBase(defaultRef, initialFetchDepth)
      } else {
        core.info('Initial push detected - all files will be listed as added')
        return await git.listAllFilesAsAdded()
      }
    }

    // Base resolved to a concrete commit - diff directly against it.
    core.info(`Changes will be detected against commit (${baseSha})`)
    return await git.getChanges(baseSha)
  }

  // Changes introduced by current branch against the base branch
  core.info(`Changes will be detected against the branch ${baseRef}`)
  return await git.getChangesSinceMergeBase(baseRef, initialFetchDepth)
}
|
|
|
|
|
2020-05-21 00:31:16 +02:00
|
|
|
// Uses github REST api to get list of files changed in PR
|
2020-05-26 17:16:09 +02:00
|
|
|
async function getChangedFilesFromApi(
|
|
|
|
token: string,
|
2020-05-21 00:31:16 +02:00
|
|
|
pullRequest: Webhooks.WebhookPayloadPullRequestPullRequest
|
2020-07-11 17:17:56 +02:00
|
|
|
): Promise<File[]> {
|
2020-07-11 23:33:11 +02:00
|
|
|
core.info(`Fetching list of changed files for PR#${pullRequest.number} from Github API`)
|
2020-05-26 17:16:09 +02:00
|
|
|
const client = new github.GitHub(token)
|
2020-05-21 00:31:16 +02:00
|
|
|
const pageSize = 100
|
2020-07-11 17:17:56 +02:00
|
|
|
const files: File[] = []
|
2020-05-21 00:31:16 +02:00
|
|
|
for (let page = 0; page * pageSize < pullRequest.changed_files; page++) {
|
|
|
|
const response = await client.pulls.listFiles({
|
|
|
|
owner: github.context.repo.owner,
|
|
|
|
repo: github.context.repo.repo,
|
|
|
|
pull_number: pullRequest.number,
|
|
|
|
page,
|
|
|
|
per_page: pageSize
|
|
|
|
})
|
|
|
|
for (const row of response.data) {
|
2020-07-11 17:17:56 +02:00
|
|
|
// There's no obvious use-case for detection of renames
|
|
|
|
// Therefore we treat it as if rename detection in git diff was turned off.
|
|
|
|
// Rename is replaced by delete of original filename and add of new filename
|
|
|
|
if (row.status === ChangeStatus.Renamed) {
|
|
|
|
files.push({
|
|
|
|
filename: row.filename,
|
|
|
|
status: ChangeStatus.Added
|
|
|
|
})
|
|
|
|
files.push({
|
|
|
|
// 'previous_filename' for some unknown reason isn't in the type definition or documentation
|
|
|
|
filename: (<any>row).previous_filename as string,
|
|
|
|
status: ChangeStatus.Deleted
|
|
|
|
})
|
|
|
|
} else {
|
|
|
|
files.push({
|
|
|
|
filename: row.filename,
|
|
|
|
status: row.status as ChangeStatus
|
|
|
|
})
|
|
|
|
}
|
2020-05-21 00:31:16 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return files
|
|
|
|
}
|
|
|
|
|
2020-08-30 21:18:14 +02:00
|
|
|
function exportResults(results: FilterResults, format: ExportFormat): void {
|
2020-09-01 22:47:38 +02:00
|
|
|
core.info('Results:')
|
2020-08-30 21:18:14 +02:00
|
|
|
for (const [key, files] of Object.entries(results)) {
|
|
|
|
const value = files.length > 0
|
|
|
|
core.startGroup(`Filter ${key} = ${value}`)
|
2020-09-01 22:47:38 +02:00
|
|
|
if (files.length > 0) {
|
|
|
|
core.info('Matching files:')
|
|
|
|
for (const file of files) {
|
|
|
|
core.info(`${file.filename} [${file.status}]`)
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
core.info('Matching files: none')
|
2020-07-11 23:33:11 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
core.setOutput(key, value)
|
2020-08-30 21:18:14 +02:00
|
|
|
if (format !== 'none') {
|
|
|
|
const filesValue = serializeExport(files, format)
|
|
|
|
core.setOutput(`${key}_files`, filesValue)
|
|
|
|
}
|
2020-07-11 23:33:11 +02:00
|
|
|
}
|
|
|
|
core.endGroup()
|
|
|
|
}
|
|
|
|
|
2020-08-30 21:18:14 +02:00
|
|
|
function serializeExport(files: File[], format: ExportFormat): string {
|
|
|
|
const fileNames = files.map(file => file.filename)
|
|
|
|
switch (format) {
|
|
|
|
case 'json':
|
|
|
|
return JSON.stringify(fileNames)
|
|
|
|
case 'shell':
|
|
|
|
return fileNames.map(shellEscape).join(' ')
|
|
|
|
default:
|
|
|
|
return ''
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
function isExportFormat(value: string): value is ExportFormat {
|
|
|
|
return value === 'none' || value === 'shell' || value === 'json'
|
|
|
|
}
|
|
|
|
|
2020-05-20 17:03:08 +02:00
|
|
|
// Entry point - failures are reported via core.setFailed inside run().
run()
|