Mirror of https://gitea.com/actions/setup-node.git (synced 2025-04-22 16:55:37 +08:00)
review fixes
This commit is contained in:
parent 9ddc512bc1
commit d2b7e08b80
dist/cache-save/index.js (vendored), 11 changed lines
@@ -60522,15 +60522,14 @@ const getProjectDirectoriesFromCacheDependencyPath = (cacheDependencyPath) => __
         cacheDependenciesPaths = memoized;
     }
     else {
-        cacheDependenciesPaths = (yield glob
-            .create(cacheDependencyPath)
-            .then(globber => globber.glob())) || [''];
+        const globber = yield glob.create(cacheDependencyPath);
+        cacheDependenciesPaths = (yield globber.glob()) || [''];
         exports.memoizedCacheDependencies[cacheDependencyPath] = cacheDependenciesPaths;
     }
     const existingDirectories = cacheDependenciesPaths
-        .map(cacheDependencyPath => path_1.default.dirname(cacheDependencyPath))
+        .map(path_1.default.dirname)
         // uniq in order to do not traverse the same directories during the further processing
-        .filter((cachePath, i, result) => cachePath != null && result.indexOf(cachePath) === i)
+        .filter((item, i, src) => item != null && src.indexOf(item) === i)
         .filter(directory => fs_1.default.existsSync(directory) && fs_1.default.lstatSync(directory).isDirectory());
     // if user explicitly pointed out some file, but it does not exist it is definitely
     // not he wanted, thus we should throw an error not trying to workaround with unexpected
@@ -60555,7 +60554,7 @@ const getCacheDirectoriesFromCacheDependencyPath = (packageManagerInfo, cacheDep
         return cacheFolderPath;
     })));
     // uniq in order to do not cache the same directories twice
-    return cacheFoldersPaths.filter((cachePath, i, result) => result.indexOf(cachePath) === i);
+    return cacheFoldersPaths.filter((item, i, src) => src.indexOf(item) === i);
 });
 /**
  * Finds the cache directories configured for the repo ignoring cache-dependency-path
dist/setup/index.js (vendored), 11 changed lines
@@ -71304,15 +71304,14 @@ const getProjectDirectoriesFromCacheDependencyPath = (cacheDependencyPath) => __
         cacheDependenciesPaths = memoized;
     }
     else {
-        cacheDependenciesPaths = (yield glob
-            .create(cacheDependencyPath)
-            .then(globber => globber.glob())) || [''];
+        const globber = yield glob.create(cacheDependencyPath);
+        cacheDependenciesPaths = (yield globber.glob()) || [''];
         exports.memoizedCacheDependencies[cacheDependencyPath] = cacheDependenciesPaths;
     }
     const existingDirectories = cacheDependenciesPaths
-        .map(cacheDependencyPath => path_1.default.dirname(cacheDependencyPath))
+        .map(path_1.default.dirname)
         // uniq in order to do not traverse the same directories during the further processing
-        .filter((cachePath, i, result) => cachePath != null && result.indexOf(cachePath) === i)
+        .filter((item, i, src) => item != null && src.indexOf(item) === i)
         .filter(directory => fs_1.default.existsSync(directory) && fs_1.default.lstatSync(directory).isDirectory());
     // if user explicitly pointed out some file, but it does not exist it is definitely
     // not he wanted, thus we should throw an error not trying to workaround with unexpected
@@ -71337,7 +71336,7 @@ const getCacheDirectoriesFromCacheDependencyPath = (packageManagerInfo, cacheDep
         return cacheFolderPath;
     })));
     // uniq in order to do not cache the same directories twice
-    return cacheFoldersPaths.filter((cachePath, i, result) => result.indexOf(cachePath) === i);
+    return cacheFoldersPaths.filter((item, i, src) => src.indexOf(item) === i);
 });
 /**
  * Finds the cache directories configured for the repo ignoring cache-dependency-path
@@ -131,19 +131,15 @@ const getProjectDirectoriesFromCacheDependencyPath = async (
   if (memoized) {
     cacheDependenciesPaths = memoized;
   } else {
-    cacheDependenciesPaths = (await glob
-      .create(cacheDependencyPath)
-      .then(globber => globber.glob())) || [''];
+    const globber = await glob.create(cacheDependencyPath);
+    cacheDependenciesPaths = (await globber.glob()) || [''];
     memoizedCacheDependencies[cacheDependencyPath] = cacheDependenciesPaths;
   }

   const existingDirectories: string[] = cacheDependenciesPaths
-    .map(cacheDependencyPath => path.dirname(cacheDependencyPath))
+    .map(path.dirname)
     // uniq in order to do not traverse the same directories during the further processing
-    .filter(
-      (cachePath, i, result) =>
-        cachePath != null && result.indexOf(cachePath) === i
-    )
+    .filter((item, i, src) => item != null && src.indexOf(item) === i)
     .filter(
       directory =>
         fs.existsSync(directory) && fs.lstatSync(directory).isDirectory()
@@ -187,9 +183,7 @@ const getCacheDirectoriesFromCacheDependencyPath = async (
     )
   );
   // uniq in order to do not cache the same directories twice
-  return cacheFoldersPaths.filter(
-    (cachePath, i, result) => result.indexOf(cachePath) === i
-  );
+  return cacheFoldersPaths.filter((item, i, src) => src.indexOf(item) === i);
 };

 /**
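
For reference, here is a minimal standalone sketch of the pattern the review settles on, written against the @actions/glob calls visible in the diff (glob.create followed by globber.glob) plus the simplified uniq filter. The helper name and its exact shape are illustrative, not the repository's actual source.

import * as glob from '@actions/glob';
import fs from 'fs';
import path from 'path';

// Illustrative helper (hypothetical name) mirroring the refactored pattern:
// expand a cache-dependency-path glob, then keep only the existing,
// de-duplicated project directories.
const resolveProjectDirectories = async (
  cacheDependencyPath: string
): Promise<string[]> => {
  // Create the globber first, then await its results, instead of chaining
  // glob.create(...).then(globber => globber.glob()).
  const globber = await glob.create(cacheDependencyPath);
  const cacheDependenciesPaths = (await globber.glob()) || [''];

  return (
    cacheDependenciesPaths
      .map(path.dirname)
      // uniq: keep only the first occurrence of each directory
      .filter((item, i, src) => item != null && src.indexOf(item) === i)
      // keep only paths that exist and are directories
      .filter(
        directory =>
          fs.existsSync(directory) && fs.lstatSync(directory).isDirectory()
      )
  );
};

Calling it with a pattern such as '**/package-lock.json' would return the unique, existing directories that contain a matching lockfile.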