Compare commits

...

8 Commits

Author  SHA1  Message  Date
Peter Evans  163be38112  Reset git client's extraheader list to remove local config  2020-03-07 08:50:57 +09:00
Peter Evans  01aa132594  Override auth extraheader with more specific url for git-lfs client  2020-03-07 08:50:49 +09:00
Peter Evans  f6dff3ab2e  Revert unset and restore of auth extraheader  2020-03-07 08:50:43 +09:00
Peter Evans  64c34f6885  Update README  2020-03-03 09:49:52 +09:00
Peter Evans  ce00b952cf  Merge pull request #128 from peter-evans/dev (Unset and restore authorization extraheader only)  2020-02-22 17:02:15 +09:00
Peter Evans  0d42c285a3  Unset and restore authorization extraheader only  2020-02-22 16:56:42 +09:00
Peter Evans  ea1eaf1734  Merge pull request #127 from peter-evans/dev (Unset and restore extraheader config option)  2020-02-22 14:53:46 +09:00
Peter Evans  d5c5ea3e20  Unset and restore extraheader config option  2020-02-22 14:08:54 +09:00
25 changed files with 1158 additions and 183 deletions

.eslintrc.json  (new file, 17 lines added)

@@ -0,0 +1,17 @@
{
"env": {
"commonjs": true,
"es6": true,
"node": true
},
"extends": "eslint:recommended",
"globals": {
"Atomics": "readonly",
"SharedArrayBuffer": "readonly"
},
"parserOptions": {
"ecmaVersion": 2018
},
"rules": {
}
}


@@ -181,6 +181,7 @@ jobs:
labels: report, automated pr
assignees: peter-evans
reviewers: peter-evans
team-reviewers: owners, maintainers
milestone: 1
project: Example Project
project-column: To do


@@ -129,7 +129,10 @@ if protocol == "HTTPS":
# Mask the basic credential in logs and debug output
print(f"::add-mask::{basic_credential}")
repo.git.set_persistent_git_options(
c=f"http.https://github.com/.extraheader=AUTHORIZATION: basic {basic_credential}"
c=[
f"http.{repo_url}/.extraheader=",
f"http.{repo_url}/.extraheader=AUTHORIZATION: basic {basic_credential}",
]
)
# Determine if the checked out ref is a valid base for a pull request

dist/index.js  vendored  (240 changed lines)

@@ -34,7 +34,7 @@ module.exports =
/******/ // the startup function
/******/ function startup() {
/******/ // Load entry module and return exports
/******/ return __webpack_require__(104);
/******/ return __webpack_require__(676);
/******/ };
/******/
/******/ // run startup
@@ -973,21 +973,6 @@ module.exports = util.assign(
module.exports = require("tls");
/***/ }),
/***/ 58:
/***/ (function(module, __unusedexports, __webpack_require__) {
// Unique ID creation requires a high quality random # generator. In node.js
// this is pretty straight-forward - we use the crypto API.
var crypto = __webpack_require__(417);
module.exports = function nodeRNG() {
return crypto.randomBytes(16);
};
/***/ }),
/***/ 87:
@@ -998,109 +983,6 @@ module.exports = require("os");
/***/ }),
/***/ 104:
/***/ (function(__unusedmodule, __unusedexports, __webpack_require__) {
const { inspect } = __webpack_require__(669);
const isDocker = __webpack_require__(160);
const core = __webpack_require__(470);
const exec = __webpack_require__(986);
const setupPython = __webpack_require__(139);
async function run() {
try {
// Allows ncc to find assets to be included in the distribution
const src = __webpack_require__.ab + "src";
core.debug(`src: ${src}`);
// Determine how to access python and pip
const { pip, python } = (function() {
if (isDocker()) {
core.info("Running inside a Docker container");
// Python 3 assumed to be installed and on the PATH
return {
pip: "pip3",
python: "python3"
};
} else {
// Setup Python from the tool cache
setupPython("3.x", "x64");
return {
pip: "pip",
python: "python"
};
}
})();
// Install requirements
await exec.exec(pip, [
"install",
"--requirement",
`${src}/requirements.txt`,
"--no-index",
`--find-links=${__dirname}/vendor`
]);
// Fetch action inputs
const inputs = {
token: core.getInput("token"),
path: core.getInput("path"),
commitMessage: core.getInput("commit-message"),
committer: core.getInput("committer"),
author: core.getInput("author"),
title: core.getInput("title"),
body: core.getInput("body"),
labels: core.getInput("labels"),
assignees: core.getInput("assignees"),
reviewers: core.getInput("reviewers"),
teamReviewers: core.getInput("team-reviewers"),
milestone: core.getInput("milestone"),
project: core.getInput("project"),
projectColumn: core.getInput("project-column"),
branch: core.getInput("branch"),
base: core.getInput("base"),
branchSuffix: core.getInput("branch-suffix")
};
core.debug(`Inputs: ${inspect(inputs)}`);
// Set environment variables from inputs.
if (inputs.token) process.env.GITHUB_TOKEN = inputs.token;
if (inputs.path) process.env.CPR_PATH = inputs.path;
if (inputs.commitMessage) process.env.CPR_COMMIT_MESSAGE = inputs.commitMessage;
if (inputs.committer) process.env.CPR_COMMITTER = inputs.committer;
if (inputs.author) process.env.CPR_AUTHOR = inputs.author;
if (inputs.title) process.env.CPR_TITLE = inputs.title;
if (inputs.body) process.env.CPR_BODY = inputs.body;
if (inputs.labels) process.env.CPR_LABELS = inputs.labels;
if (inputs.assignees) process.env.CPR_ASSIGNEES = inputs.assignees;
if (inputs.reviewers) process.env.CPR_REVIEWERS = inputs.reviewers;
if (inputs.teamReviewers) process.env.CPR_TEAM_REVIEWERS = inputs.teamReviewers;
if (inputs.milestone) process.env.CPR_MILESTONE = inputs.milestone;
if (inputs.project) process.env.CPR_PROJECT_NAME = inputs.project;
if (inputs.projectColumn) process.env.CPR_PROJECT_COLUMN_NAME = inputs.projectColumn;
if (inputs.branch) process.env.CPR_BRANCH = inputs.branch;
if (inputs.base) process.env.CPR_BASE = inputs.base;
if (inputs.branchSuffix) process.env.CPR_BRANCH_SUFFIX = inputs.branchSuffix;
// Execute python script
await exec.exec(python, [`${src}/create_pull_request.py`]);
} catch (error) {
core.setFailed(error.message);
}
}
run();
/***/ }),
/***/ 129:
/***/ (function(module) {
module.exports = require("child_process");
/***/ }),
/***/ 139:
/***/ (function(module, __unusedexports, __webpack_require__) {
const core = __webpack_require__(470);
@@ -1157,6 +1039,28 @@ let setupPython = function(versionSpec, arch) {
module.exports = setupPython;
/***/ }),
/***/ 129:
/***/ (function(module) {
module.exports = require("child_process");
/***/ }),
/***/ 139:
/***/ (function(module, __unusedexports, __webpack_require__) {
// Unique ID creation requires a high quality random # generator. In node.js
// this is pretty straight-forward - we use the crypto API.
var crypto = __webpack_require__(417);
module.exports = function nodeRNG() {
return crypto.randomBytes(16);
};
/***/ }),
/***/ 141:
@@ -4301,6 +4205,102 @@ function isUnixExecutable(stats) {
}
//# sourceMappingURL=io-util.js.map
/***/ }),
/***/ 676:
/***/ (function(__unusedmodule, __unusedexports, __webpack_require__) {
const { inspect } = __webpack_require__(669);
const isDocker = __webpack_require__(160);
const core = __webpack_require__(470);
const exec = __webpack_require__(986);
const setupPython = __webpack_require__(104);
async function run() {
try {
// Allows ncc to find assets to be included in the distribution
const cpr = __webpack_require__.ab + "cpr";
core.debug(`cpr: ${cpr}`);
// Determine how to access python and pip
const { pip, python } = (function() {
if (isDocker()) {
core.info("Running inside a Docker container");
// Python 3 assumed to be installed and on the PATH
return {
pip: "pip3",
python: "python3"
};
} else {
// Setup Python from the tool cache
setupPython("3.x", "x64");
return {
pip: "pip",
python: "python"
};
}
})();
// Install requirements
await exec.exec(pip, [
"install",
"--requirement",
`${cpr}/requirements.txt`,
"--no-index",
`--find-links=${__dirname}/vendor`
]);
// Fetch action inputs
const inputs = {
token: core.getInput("token"),
path: core.getInput("path"),
commitMessage: core.getInput("commit-message"),
committer: core.getInput("committer"),
author: core.getInput("author"),
title: core.getInput("title"),
body: core.getInput("body"),
labels: core.getInput("labels"),
assignees: core.getInput("assignees"),
reviewers: core.getInput("reviewers"),
teamReviewers: core.getInput("team-reviewers"),
milestone: core.getInput("milestone"),
project: core.getInput("project"),
projectColumn: core.getInput("project-column"),
branch: core.getInput("branch"),
base: core.getInput("base"),
branchSuffix: core.getInput("branch-suffix")
};
core.debug(`Inputs: ${inspect(inputs)}`);
// Set environment variables from inputs.
if (inputs.token) process.env.GITHUB_TOKEN = inputs.token;
if (inputs.path) process.env.CPR_PATH = inputs.path;
if (inputs.commitMessage) process.env.CPR_COMMIT_MESSAGE = inputs.commitMessage;
if (inputs.committer) process.env.CPR_COMMITTER = inputs.committer;
if (inputs.author) process.env.CPR_AUTHOR = inputs.author;
if (inputs.title) process.env.CPR_TITLE = inputs.title;
if (inputs.body) process.env.CPR_BODY = inputs.body;
if (inputs.labels) process.env.CPR_LABELS = inputs.labels;
if (inputs.assignees) process.env.CPR_ASSIGNEES = inputs.assignees;
if (inputs.reviewers) process.env.CPR_REVIEWERS = inputs.reviewers;
if (inputs.teamReviewers) process.env.CPR_TEAM_REVIEWERS = inputs.teamReviewers;
if (inputs.milestone) process.env.CPR_MILESTONE = inputs.milestone;
if (inputs.project) process.env.CPR_PROJECT_NAME = inputs.project;
if (inputs.projectColumn) process.env.CPR_PROJECT_COLUMN_NAME = inputs.projectColumn;
if (inputs.branch) process.env.CPR_BRANCH = inputs.branch;
if (inputs.base) process.env.CPR_BASE = inputs.base;
if (inputs.branchSuffix) process.env.CPR_BRANCH_SUFFIX = inputs.branchSuffix;
// Execute create pull request
await exec.exec(python, [`${cpr}/create_pull_request.py`]);
} catch (error) {
core.setFailed(error.message);
}
}
run();
/***/ }),
/***/ 722:
@@ -4733,7 +4733,7 @@ module.exports = require("zlib");
/***/ 826:
/***/ (function(module, __unusedexports, __webpack_require__) {
var rng = __webpack_require__(58);
var rng = __webpack_require__(139);
var bytesToUuid = __webpack_require__(722);
function v4(options, buf, offset) {


@@ -1,52 +0,0 @@
const core = require("@actions/core");
const tc = require("@actions/tool-cache");
const path = require("path");
const semver = require("semver");
/**
* Setup for Python from the GitHub Actions tool cache
* Converted from https://github.com/actions/setup-python
*
* @param {string} versionSpec version of Python
* @param {string} arch architecture (x64|x32)
*/
let setupPython = function(versionSpec, arch) {
return new Promise((resolve, reject) => {
const IS_WINDOWS = process.platform === "win32";
// Find the version of Python we want in the tool cache
const installDir = tc.find("Python", versionSpec, arch);
core.debug(`installDir: ${installDir}`);
// Set paths
core.exportVariable("pythonLocation", installDir);
core.addPath(installDir);
if (IS_WINDOWS) {
core.addPath(path.join(installDir, "Scripts"));
} else {
core.addPath(path.join(installDir, "bin"));
}
if (IS_WINDOWS) {
// Add --user directory
// `installDir` from tool cache should look like $AGENT_TOOLSDIRECTORY/Python/<semantic version>/x64/
// So if `findLocalTool` succeeded above, we must have a conformant `installDir`
const version = path.basename(path.dirname(installDir));
const major = semver.major(version);
const minor = semver.minor(version);
const userScriptsDir = path.join(
process.env["APPDATA"] || "",
"Python",
`Python${major}${minor}`,
"Scripts"
);
core.addPath(userScriptsDir);
}
// On Linux and macOS, pip will create the --user directory and add it to PATH as needed.
resolve();
});
};
module.exports = setupPython;

Binary file not shown.

BIN  dist/vendor/idna-2.9.tar.gz  vendored  (binary file not shown)

Binary file not shown.

BIN  dist/vendor/requests-2.23.0.tar.gz  vendored  (binary file not shown)

package-lock.json  generated  (1001 changed lines)

File diff suppressed because it is too large.


@@ -5,8 +5,9 @@
"main": "index.js",
"scripts": {
"clean": "rm -rf dist",
"build": "ncc build index.js -o dist",
"vendor-deps": "pip download -r src/requirements.txt --no-binary=:all: -d dist/vendor",
"lint": "eslint src/index.js",
"build": "ncc build src/index.js -o dist",
"vendor-deps": "pip download -r src/cpr/requirements.txt --no-binary=:all: -d dist/vendor",
"package": "npm run build && npm run vendor-deps"
},
"repository": {
@@ -27,6 +28,7 @@
"is-docker": "^2.0.0"
},
"devDependencies": {
"@zeit/ncc": "0.21.1"
"@zeit/ncc": "0.21.1",
"eslint": "6.8.0"
}
}


@@ -129,7 +129,10 @@ if protocol == "HTTPS":
# Mask the basic credential in logs and debug output
print(f"::add-mask::{basic_credential}")
repo.git.set_persistent_git_options(
c=f"http.https://github.com/.extraheader=AUTHORIZATION: basic {basic_credential}"
c=[
f"http.{repo_url}/.extraheader=",
f"http.{repo_url}/.extraheader=AUTHORIZATION: basic {basic_credential}",
]
)
# Determine if the checked out ref is a valid base for a pull request


@@ -2,13 +2,13 @@ const { inspect } = require("util");
const isDocker = require("is-docker");
const core = require("@actions/core");
const exec = require("@actions/exec");
const setupPython = require("./src/setup-python");
const setupPython = require("./setup-python");
async function run() {
try {
// Allows ncc to find assets to be included in the distribution
const src = __dirname + "/src";
core.debug(`src: ${src}`);
const cpr = __dirname + "/cpr";
core.debug(`cpr: ${cpr}`);
// Determine how to access python and pip
const { pip, python } = (function() {
@@ -33,7 +33,7 @@ async function run() {
await exec.exec(pip, [
"install",
"--requirement",
`${src}/requirements.txt`,
`${cpr}/requirements.txt`,
"--no-index",
`--find-links=${__dirname}/vendor`
]);
@@ -79,8 +79,8 @@ async function run() {
if (inputs.base) process.env.CPR_BASE = inputs.base;
if (inputs.branchSuffix) process.env.CPR_BRANCH_SUFFIX = inputs.branchSuffix;
// Execute python script
await exec.exec(python, [`${src}/create_pull_request.py`]);
// Execute create pull request
await exec.exec(python, [`${cpr}/create_pull_request.py`]);
} catch (error) {
core.setFailed(error.message);
}