diff --git a/.eslintrc.js b/.eslintrc.js
index 45ba33046050d3e79e714e72756e276291d3fd08..53e5435b797ecb53ca084546c47bdb20dc92408d 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -27,7 +27,6 @@ module.exports = {
     // TODO: fix lint
     '@typescript-eslint/camelcase': 'off', // disabled until ??
-    '@typescript-eslint/no-use-before-define': 1,
     '@typescript-eslint/no-explicit-any': 0,
     '@typescript-eslint/no-non-null-assertion': 0,
     '@typescript-eslint/no-unused-vars': [
diff --git a/lib/config/presets.ts b/lib/config/presets.ts
index 9af6b7b778cc91f58f22f7073e3aa65f143c52a9..48062a64108fdf0dabec483908bcfd9d047789a8 100644
--- a/lib/config/presets.ts
+++ b/lib/config/presets.ts
@@ -15,111 +15,6 @@ const datasources = {
   gitlab,
 };
 
-export async function resolveConfigPresets(
-  inputConfig: RenovateConfig,
-  ignorePresets?: string[],
-  existingPresets: string[] = []
-): Promise<RenovateConfig> {
-  if (!ignorePresets) {
-    ignorePresets = inputConfig.ignorePresets || []; // eslint-disable-line
-  }
-  logger.trace(
-    { config: inputConfig, existingPresets },
-    'resolveConfigPresets'
-  );
-  let config: RenovateConfig = {};
-  // First, merge all the preset configs from left to right
-  if (inputConfig.extends && inputConfig.extends.length) {
-    for (const preset of inputConfig.extends) {
-      // istanbul ignore if
-      if (existingPresets.includes(preset)) {
-        logger.info(`Already seen preset ${preset} in ${existingPresets}`);
-      } else if (ignorePresets.includes(preset)) {
-        // istanbul ignore next
-        logger.info(`Ignoring preset ${preset} in ${existingPresets}`);
-      } else {
-        logger.trace(`Resolving preset "${preset}"`);
-        let fetchedPreset;
-        try {
-          fetchedPreset = await getPreset(preset);
-        } catch (err) {
-          logger.debug({ err }, 'Preset fetch error');
-          // istanbul ignore if
-          if (
-            err.message === 'platform-failure' ||
-            err.message === 'registry-failure'
-          ) {
-            throw err;
-          }
-          const error = new Error('config-validation');
-          if (err.message === 'dep not found') {
-            error.validationError = `Cannot find preset's package (${preset})`;
-          } else if (err.message === 'preset renovate-config not found') {
-            // istanbul ignore next
-            error.validationError = `Preset package is missing a renovate-config entry (${preset})`;
-          } else if (err.message === 'preset not found') {
-            error.validationError = `Preset name not found within published preset config (${preset})`;
-          }
-          // istanbul ignore if
-          if (existingPresets.length) {
-            error.validationError +=
-              '. 
Note: this is a *nested* preset so please contact the preset author if you are unable to fix it yourself.'; - } - logger.info('Throwing preset error'); - throw error; - } - const presetConfig = await resolveConfigPresets( - fetchedPreset, - ignorePresets, - existingPresets.concat([preset]) - ); - // istanbul ignore if - if ( - inputConfig && - inputConfig.ignoreDeps && - inputConfig.ignoreDeps.length === 0 - ) { - delete presetConfig.description; - } - config = mergeChildConfig(config, presetConfig); - } - } - } - logger.trace({ config }, `Post-preset resolve config`); - // Now assign "regular" config on top - config = mergeChildConfig(config, inputConfig); - delete config.extends; - delete config.ignorePresets; - logger.trace({ config }, `Post-merge resolve config`); - for (const [key, val] of Object.entries(config)) { - const ignoredKeys = ['content', 'onboardingConfig']; - if (is.array(val)) { - // Resolve nested objects inside arrays - config[key] = []; - for (const element of val) { - if (is.object(element)) { - config[key].push( - await resolveConfigPresets(element, ignorePresets, existingPresets) - ); - } else { - config[key].push(element); - } - } - } else if (is.object(val) && !ignoredKeys.includes(key)) { - // Resolve nested objects - logger.trace(`Resolving object "${key}"`); - config[key] = await resolveConfigPresets( - val, - ignorePresets, - existingPresets - ); - } - } - logger.trace({ config: inputConfig }, 'Input config'); - logger.trace({ config }, 'Resolved config'); - return config; -} - export function replaceArgs( obj: string | string[] | object | object[], argMapping: Record<string, any> @@ -149,13 +44,6 @@ export function replaceArgs( return obj; } -export interface ParsedPreset { - datasource: string; - packageName: string; - presetName: string; - params?: string[]; -} - export function parsePreset(input: string): ParsedPreset { let str = input; let datasource: string; @@ -246,3 +134,115 @@ export async function getPreset(preset: string): Promise<RenovateConfig> { const { migratedConfig } = migration.migrateConfig(presetConfig); return massage.massageConfig(migratedConfig); } + +export async function resolveConfigPresets( + inputConfig: RenovateConfig, + ignorePresets?: string[], + existingPresets: string[] = [] +): Promise<RenovateConfig> { + if (!ignorePresets) { + ignorePresets = inputConfig.ignorePresets || []; // eslint-disable-line + } + logger.trace( + { config: inputConfig, existingPresets }, + 'resolveConfigPresets' + ); + let config: RenovateConfig = {}; + // First, merge all the preset configs from left to right + if (inputConfig.extends && inputConfig.extends.length) { + for (const preset of inputConfig.extends) { + // istanbul ignore if + if (existingPresets.includes(preset)) { + logger.info(`Already seen preset ${preset} in ${existingPresets}`); + } else if (ignorePresets.includes(preset)) { + // istanbul ignore next + logger.info(`Ignoring preset ${preset} in ${existingPresets}`); + } else { + logger.trace(`Resolving preset "${preset}"`); + let fetchedPreset; + try { + fetchedPreset = await getPreset(preset); + } catch (err) { + logger.debug({ err }, 'Preset fetch error'); + // istanbul ignore if + if ( + err.message === 'platform-failure' || + err.message === 'registry-failure' + ) { + throw err; + } + const error = new Error('config-validation'); + if (err.message === 'dep not found') { + error.validationError = `Cannot find preset's package (${preset})`; + } else if (err.message === 'preset renovate-config not found') { + // istanbul ignore next + 
error.validationError = `Preset package is missing a renovate-config entry (${preset})`; + } else if (err.message === 'preset not found') { + error.validationError = `Preset name not found within published preset config (${preset})`; + } + // istanbul ignore if + if (existingPresets.length) { + error.validationError += + '. Note: this is a *nested* preset so please contact the preset author if you are unable to fix it yourself.'; + } + logger.info('Throwing preset error'); + throw error; + } + const presetConfig = await resolveConfigPresets( + fetchedPreset, + ignorePresets, + existingPresets.concat([preset]) + ); + // istanbul ignore if + if ( + inputConfig && + inputConfig.ignoreDeps && + inputConfig.ignoreDeps.length === 0 + ) { + delete presetConfig.description; + } + config = mergeChildConfig(config, presetConfig); + } + } + } + logger.trace({ config }, `Post-preset resolve config`); + // Now assign "regular" config on top + config = mergeChildConfig(config, inputConfig); + delete config.extends; + delete config.ignorePresets; + logger.trace({ config }, `Post-merge resolve config`); + for (const [key, val] of Object.entries(config)) { + const ignoredKeys = ['content', 'onboardingConfig']; + if (is.array(val)) { + // Resolve nested objects inside arrays + config[key] = []; + for (const element of val) { + if (is.object(element)) { + config[key].push( + await resolveConfigPresets(element, ignorePresets, existingPresets) + ); + } else { + config[key].push(element); + } + } + } else if (is.object(val) && !ignoredKeys.includes(key)) { + // Resolve nested objects + logger.trace(`Resolving object "${key}"`); + config[key] = await resolveConfigPresets( + val, + ignorePresets, + existingPresets + ); + } + } + logger.trace({ config: inputConfig }, 'Input config'); + logger.trace({ config }, 'Resolved config'); + return config; +} + +export interface ParsedPreset { + datasource: string; + packageName: string; + presetName: string; + params?: string[]; +} diff --git a/lib/datasource/docker/index.ts b/lib/datasource/docker/index.ts index 815f7af94d5a7ad0b074b634cdf84800718b4e49..90a11673d79a37091434b1b33c7ffde9c3d30e99 100644 --- a/lib/datasource/docker/index.ts +++ b/lib/datasource/docker/index.ts @@ -48,6 +48,38 @@ export function getRegistryRepository( }; } +function getECRAuthToken(region: string, opts: hostRules.HostRule) { + const config = { region, accessKeyId: undefined, secretAccessKey: undefined }; + if (opts.username && opts.password) { + config.accessKeyId = opts.username; + config.secretAccessKey = opts.password; + } + const ecr = new AWS.ECR(config); + return new Promise<string>(resolve => { + ecr.getAuthorizationToken({}, (err, data) => { + if (err) { + logger.trace({ err }, 'err'); + logger.info('ECR getAuthorizationToken error'); + resolve(null); + } else { + const authorizationToken = + data && + data.authorizationData && + data.authorizationData[0] && + data.authorizationData[0].authorizationToken; + if (authorizationToken) { + resolve(authorizationToken); + } else { + logger.warn( + 'Could not extract authorizationToken from ECR getAuthorizationToken response' + ); + resolve(null); + } + } + }); + }); +} + async function getAuthHeaders( registry: string, repository: string @@ -376,6 +408,34 @@ async function getTags( } } +export function getConfigResponse(url: string, headers: OutgoingHttpHeaders) { + return got(url, { + headers, + hooks: { + beforeRedirect: [ + (options: any) => { + if ( + options.search && + options.search.indexOf('X-Amz-Algorithm') !== -1 + ) { + // if there 
is no port in the redirect URL string, then delete it from the redirect options. + // This can be evaluated for removal after upgrading to Got v10 + const portInUrl = options.href.split('/')[2].split(':')[1]; + if (!portInUrl) { + // eslint-disable-next-line no-param-reassign + delete options.port; // Redirect will instead use 80 or 443 for HTTP or HTTPS respectively + } + + // docker registry is hosted on amazon, redirect url includes authentication. + // eslint-disable-next-line no-param-reassign + delete options.headers.authorization; + } + }, + ], + }, + }); +} + /* * docker.getLabels * @@ -496,34 +556,6 @@ async function getLabels( } } -export function getConfigResponse(url: string, headers: OutgoingHttpHeaders) { - return got(url, { - headers, - hooks: { - beforeRedirect: [ - (options: any) => { - if ( - options.search && - options.search.indexOf('X-Amz-Algorithm') !== -1 - ) { - // if there is no port in the redirect URL string, then delete it from the redirect options. - // This can be evaluated for removal after upgrading to Got v10 - const portInUrl = options.href.split('/')[2].split(':')[1]; - if (!portInUrl) { - // eslint-disable-next-line no-param-reassign - delete options.port; // Redirect will instead use 80 or 443 for HTTP or HTTPS respectively - } - - // docker registry is hosted on amazon, redirect url includes authentication. - // eslint-disable-next-line no-param-reassign - delete options.headers.authorization; - } - }, - ], - }, - }); -} - /** * docker.getPkgReleases * @@ -562,35 +594,3 @@ export async function getPkgReleases({ } return ret; } - -function getECRAuthToken(region: string, opts: hostRules.HostRule) { - const config = { region, accessKeyId: undefined, secretAccessKey: undefined }; - if (opts.username && opts.password) { - config.accessKeyId = opts.username; - config.secretAccessKey = opts.password; - } - const ecr = new AWS.ECR(config); - return new Promise<string>(resolve => { - ecr.getAuthorizationToken({}, (err, data) => { - if (err) { - logger.trace({ err }, 'err'); - logger.info('ECR getAuthorizationToken error'); - resolve(null); - } else { - const authorizationToken = - data && - data.authorizationData && - data.authorizationData[0] && - data.authorizationData[0].authorizationToken; - if (authorizationToken) { - resolve(authorizationToken); - } else { - logger.warn( - 'Could not extract authorizationToken from ECR getAuthorizationToken response' - ); - resolve(null); - } - } - }); - }); -} diff --git a/lib/datasource/gitlab/index.ts b/lib/datasource/gitlab/index.ts index c1970060b180069ad711fa88358a8d4b06dd7e0c..7de82a6273cddd0daeb2bfd40929de148431b154 100644 --- a/lib/datasource/gitlab/index.ts +++ b/lib/datasource/gitlab/index.ts @@ -7,6 +7,26 @@ const glGot = api.get; const GitLabApiUrl = 'https://gitlab.com/api/v4/projects'; +async function getDefaultBranchName(urlEncodedPkgName: string) { + const branchesUrl = `${GitLabApiUrl}/${urlEncodedPkgName}/repository/branches`; + type GlBranch = { + default: boolean; + name: string; + }[]; + + const res = await glGot<GlBranch>(branchesUrl); + const branches = res.body; + let defautlBranchName = 'master'; + for (const branch of branches) { + if (branch.default) { + defautlBranchName = branch.name; + break; + } + } + + return defautlBranchName; +} + export async function getPreset( pkgName: string, presetName = 'default' @@ -116,23 +136,3 @@ export async function getPkgReleases({ ); return dependency; } - -async function getDefaultBranchName(urlEncodedPkgName: string) { - const branchesUrl = 
`${GitLabApiUrl}/${urlEncodedPkgName}/repository/branches`; - type GlBranch = { - default: boolean; - name: string; - }[]; - - const res = await glGot<GlBranch>(branchesUrl); - const branches = res.body; - let defautlBranchName = 'master'; - for (const branch of branches) { - if (branch.default) { - defautlBranchName = branch.name; - break; - } - } - - return defautlBranchName; -} diff --git a/lib/datasource/helm/index.ts b/lib/datasource/helm/index.ts index 64fc95635b118bdc14082d248f0e9cd7ffc91256..dfcd64f8c271978992fd0d1b9d11e40080b40bc7 100644 --- a/lib/datasource/helm/index.ts +++ b/lib/datasource/helm/index.ts @@ -4,35 +4,6 @@ import { PkgReleaseConfig, ReleaseResult } from '../common'; import got from '../../util/got'; import { logger } from '../../logger'; -export async function getPkgReleases({ - lookupName, - registryUrls, -}: PkgReleaseConfig): Promise<ReleaseResult | null> { - if (!lookupName) { - logger.warn(`lookupName was not provided to getPkgReleases`); - return null; - } - const [helmRepository] = registryUrls; - if (!helmRepository) { - logger.warn(`helmRepository was not provided to getPkgReleases`); - return null; - } - const repositoryData = await getRepositoryData(helmRepository); - if (!repositoryData) { - logger.warn(`Couldn't get index.yaml file from ${helmRepository}`); - return null; - } - const releases = repositoryData.find(chart => chart.name === lookupName); - if (!releases) { - logger.warn( - { dependency: lookupName }, - `Entry ${lookupName} doesn't exist in index.yaml from ${helmRepository}` - ); - return null; - } - return releases; -} - export async function getRepositoryData( repository: string ): Promise<ReleaseResult[]> { @@ -89,3 +60,32 @@ export async function getRepositoryData( return null; } } + +export async function getPkgReleases({ + lookupName, + registryUrls, +}: PkgReleaseConfig): Promise<ReleaseResult | null> { + if (!lookupName) { + logger.warn(`lookupName was not provided to getPkgReleases`); + return null; + } + const [helmRepository] = registryUrls; + if (!helmRepository) { + logger.warn(`helmRepository was not provided to getPkgReleases`); + return null; + } + const repositoryData = await getRepositoryData(helmRepository); + if (!repositoryData) { + logger.warn(`Couldn't get index.yaml file from ${helmRepository}`); + return null; + } + const releases = repositoryData.find(chart => chart.name === lookupName); + if (!releases) { + logger.warn( + { dependency: lookupName }, + `Entry ${lookupName} doesn't exist in index.yaml from ${helmRepository}` + ); + return null; + } + return releases; +} diff --git a/lib/datasource/index.ts b/lib/datasource/index.ts index 357ee2934a64d3fadc6b27af5b94276b24f15757..31a6993eff9b8b692fb466bc97131b2453bd8c03 100644 --- a/lib/datasource/index.ts +++ b/lib/datasource/index.ts @@ -57,6 +57,36 @@ const datasources: Record<string, Datasource> = { const cacheNamespace = 'datasource-releases'; +async function fetchReleases( + config: PkgReleaseConfig +): Promise<ReleaseResult | null> { + const { datasource } = config; + if (!datasource) { + logger.warn('No datasource found'); + } + if (!datasources[datasource]) { + logger.warn('Unknown datasource: ' + datasource); + return null; + } + const dep = await datasources[datasource].getPkgReleases(config); + addMetaData(dep, datasource, config.lookupName); + return dep; +} + +function getRawReleases(config: PkgReleaseConfig): Promise<ReleaseResult> { + const cacheKey = + cacheNamespace + + config.datasource + + config.lookupName + + config.registryUrls; + // The 
repoCache is initialized for each repo + // By returning a Promise and reusing it, we should only fetch each package at most once + if (!global.repoCache[cacheKey]) { + global.repoCache[cacheKey] = fetchReleases(config); + } + return global.repoCache[cacheKey]; +} + export async function getPkgReleases(config: PkgReleaseConfig) { const res = await getRawReleases({ ...config, @@ -81,36 +111,6 @@ export async function getPkgReleases(config: PkgReleaseConfig) { return res; } -function getRawReleases(config: PkgReleaseConfig): Promise<ReleaseResult> { - const cacheKey = - cacheNamespace + - config.datasource + - config.lookupName + - config.registryUrls; - // The repoCache is initialized for each repo - // By returning a Promise and reusing it, we should only fetch each package at most once - if (!global.repoCache[cacheKey]) { - global.repoCache[cacheKey] = fetchReleases(config); - } - return global.repoCache[cacheKey]; -} - -async function fetchReleases( - config: PkgReleaseConfig -): Promise<ReleaseResult | null> { - const { datasource } = config; - if (!datasource) { - logger.warn('No datasource found'); - } - if (!datasources[datasource]) { - logger.warn('Unknown datasource: ' + datasource); - return null; - } - const dep = await datasources[datasource].getPkgReleases(config); - addMetaData(dep, datasource, config.lookupName); - return dep; -} - export function supportsDigests(config: DigestConfig) { return !!datasources[config.datasource].getDigest; } diff --git a/lib/datasource/maven/index.ts b/lib/datasource/maven/index.ts index f5ab21c70f8acffeb974cbbd0ee39a2d6babddd3..ffc6ff5c98e14925b76f98f833922d6629f830f4 100644 --- a/lib/datasource/maven/index.ts +++ b/lib/datasource/maven/index.ts @@ -8,82 +8,12 @@ import { containsPlaceholder } from '../../manager/maven/extract'; import { downloadHttpProtocol } from './util'; import { PkgReleaseConfig, ReleaseResult } from '../common'; -export async function getPkgReleases({ - lookupName, - registryUrls, -}: PkgReleaseConfig): Promise<ReleaseResult | null> { - const versions: string[] = []; - const dependency = getDependencyParts(lookupName); - if (!is.nonEmptyArray(registryUrls)) { - logger.warn(`No repositories defined for ${dependency.display}`); - return null; - } - const repositories = registryUrls.map(repository => - repository.replace(/\/?$/, '/') - ); - logger.debug( - `Found ${repositories.length} repositories for ${dependency.display}` - ); - const repoForVersions = {}; - for (let i = 0; i < repositories.length; i += 1) { - const repoUrl = repositories[i]; - logger.debug( - `Looking up ${dependency.display} in repository #${i} - ${repoUrl}` - ); - const mavenMetadata = await downloadMavenXml( - dependency, - repoUrl, - 'maven-metadata.xml' - ); - if (mavenMetadata) { - const newVersions = extractVersions(mavenMetadata).filter( - version => !versions.includes(version) - ); - const latestVersion = getLatestVersion(newVersions); - if (latestVersion) { - repoForVersions[latestVersion] = repoUrl; - } - versions.push(...newVersions); - logger.debug(`Found ${newVersions.length} new versions for ${dependency.display} in repository ${repoUrl}`); // prettier-ignore - } - } - - if (versions.length === 0) { - logger.info(`No versions found for ${dependency.display} in ${repositories.length} repositories`); // prettier-ignore +async function downloadFileProtocol(pkgUrl: url.URL): Promise<string | null> { + const pkgPath = pkgUrl.toString().replace('file://', ''); + if (!(await fs.exists(pkgPath))) { return null; } - logger.debug(`Found 
${versions.length} versions for ${dependency.display}`); - const latestVersion = getLatestVersion(versions); - const repoUrl = repoForVersions[latestVersion]; - const dependencyInfo = await getDependencyInfo( - dependency, - repoUrl, - latestVersion - ); - - return { - ...dependency, - ...dependencyInfo, - releases: versions.map(v => ({ version: v })), - }; -} - -function getDependencyParts(lookupName: string): MavenDependency { - const [group, name] = lookupName.split(':'); - const dependencyUrl = `${group.replace(/\./g, '/')}/${name}`; - return { - display: lookupName, - group, - name, - dependencyUrl, - }; -} - -interface MavenDependency { - display: string; - group?: string; - name?: string; - dependencyUrl: string; + return fs.readFile(pkgPath, 'utf8'); } async function downloadMavenXml( @@ -137,28 +67,6 @@ async function downloadMavenXml( } } -function extractVersions(metadata: XmlDocument): string[] { - const versions = metadata.descendantWithPath('versioning.versions'); - const elements = versions && versions.childrenNamed('version'); - if (!elements) return []; - return elements.map(el => el.val); -} - -async function downloadFileProtocol(pkgUrl: url.URL): Promise<string | null> { - const pkgPath = pkgUrl.toString().replace('file://', ''); - if (!(await fs.exists(pkgPath))) { - return null; - } - return fs.readFile(pkgPath, 'utf8'); -} - -function getLatestVersion(versions: string[]): string | null { - if (versions.length === 0) return null; - return versions.reduce((latestVersion, version) => - compare(version, latestVersion) === 1 ? version : latestVersion - ); -} - async function getDependencyInfo( dependency: MavenDependency, repoUrl: string, @@ -182,3 +90,95 @@ async function getDependencyInfo( return result; } + +function getLatestVersion(versions: string[]): string | null { + if (versions.length === 0) return null; + return versions.reduce((latestVersion, version) => + compare(version, latestVersion) === 1 ? 
version : latestVersion + ); +} + +interface MavenDependency { + display: string; + group?: string; + name?: string; + dependencyUrl: string; +} + +function getDependencyParts(lookupName: string): MavenDependency { + const [group, name] = lookupName.split(':'); + const dependencyUrl = `${group.replace(/\./g, '/')}/${name}`; + return { + display: lookupName, + group, + name, + dependencyUrl, + }; +} + +function extractVersions(metadata: XmlDocument): string[] { + const versions = metadata.descendantWithPath('versioning.versions'); + const elements = versions && versions.childrenNamed('version'); + if (!elements) return []; + return elements.map(el => el.val); +} + +export async function getPkgReleases({ + lookupName, + registryUrls, +}: PkgReleaseConfig): Promise<ReleaseResult | null> { + const versions: string[] = []; + const dependency = getDependencyParts(lookupName); + if (!is.nonEmptyArray(registryUrls)) { + logger.warn(`No repositories defined for ${dependency.display}`); + return null; + } + const repositories = registryUrls.map(repository => + repository.replace(/\/?$/, '/') + ); + logger.debug( + `Found ${repositories.length} repositories for ${dependency.display}` + ); + const repoForVersions = {}; + for (let i = 0; i < repositories.length; i += 1) { + const repoUrl = repositories[i]; + logger.debug( + `Looking up ${dependency.display} in repository #${i} - ${repoUrl}` + ); + const mavenMetadata = await downloadMavenXml( + dependency, + repoUrl, + 'maven-metadata.xml' + ); + if (mavenMetadata) { + const newVersions = extractVersions(mavenMetadata).filter( + version => !versions.includes(version) + ); + const latestVersion = getLatestVersion(newVersions); + if (latestVersion) { + repoForVersions[latestVersion] = repoUrl; + } + versions.push(...newVersions); + logger.debug(`Found ${newVersions.length} new versions for ${dependency.display} in repository ${repoUrl}`); // prettier-ignore + } + } + + if (versions.length === 0) { + logger.info(`No versions found for ${dependency.display} in ${repositories.length} repositories`); // prettier-ignore + return null; + } + logger.debug(`Found ${versions.length} versions for ${dependency.display}`); + const latestVersion = getLatestVersion(versions); + const repoUrl = repoForVersions[latestVersion]; + const dependencyInfo = await getDependencyInfo( + dependency, + repoUrl, + latestVersion + ); + + return { + ...dependency, + ...dependencyInfo, + releases: versions.map(v => ({ version: v })), + }; +} diff --git a/lib/datasource/npm/npmrc.ts b/lib/datasource/npm/npmrc.ts index 832f9372512ac35d6b1b667327e48bff4492c7e1..6a5f36af61517992fd982dee19844f40da1ba796 100644 --- a/lib/datasource/npm/npmrc.ts +++ b/lib/datasource/npm/npmrc.ts @@ -10,6 +10,23 @@ export function getNpmrc(): Record<string, any> | null { return npmrc; } +function envReplace(value: any, env = process.env): any { + // istanbul ignore if + if (!is.string(value)) { + return value; + } + + const ENV_EXPR = /(\\*)\$\{([^}]+)\}/g; + + return value.replace(ENV_EXPR, (match, esc, envVarName) => { + if (env[envVarName] === undefined) { + logger.warn('Failed to replace env in config: ' + match); + throw new Error('env-replace'); + } + return env[envVarName]; + }); +} + export function setNpmrc(input?: string) { if (input) { if (input === npmrcRaw) { @@ -56,20 +73,3 @@ export function setNpmrc(input?: string) { npmrcRaw = null; } } - -function envReplace(value: any, env = process.env): any { - // istanbul ignore if - if (!is.string(value)) { - return value; - } - - const ENV_EXPR = 
/(\\*)\$\{([^}]+)\}/g; - - return value.replace(ENV_EXPR, (match, esc, envVarName) => { - if (env[envVarName] === undefined) { - logger.warn('Failed to replace env in config: ' + match); - throw new Error('env-replace'); - } - return env[envVarName]; - }); -} diff --git a/lib/datasource/nuget/index.ts b/lib/datasource/nuget/index.ts index 529bdf126ed7c00060c3f86e1733a5f64f44c6bb..8e50396c49983738fa3ec9e30dd816b2d79765a7 100644 --- a/lib/datasource/nuget/index.ts +++ b/lib/datasource/nuget/index.ts @@ -4,6 +4,20 @@ import * as v2 from './v2'; import * as v3 from './v3'; import { PkgReleaseConfig, ReleaseResult } from '../common'; +function detectFeedVersion(url: string): 2 | 3 | null { + try { + const parsecUrl = urlApi.parse(url); + // Official client does it in the same way + if (parsecUrl.pathname.endsWith('.json')) { + return 3; + } + return 2; + } catch (e) { + logger.debug({ e }, `nuget registry failure: can't parse ${url}`); + return null; + } +} + export async function getPkgReleases({ lookupName, registryUrls, @@ -32,17 +46,3 @@ export async function getPkgReleases({ } return dep; } - -function detectFeedVersion(url: string): 2 | 3 | null { - try { - const parsecUrl = urlApi.parse(url); - // Official client does it in the same way - if (parsecUrl.pathname.endsWith('.json')) { - return 3; - } - return 2; - } catch (e) { - logger.debug({ e }, `nuget registry failure: can't parse ${url}`); - return null; - } -} diff --git a/lib/datasource/nuget/v2.ts b/lib/datasource/nuget/v2.ts index ffed8c62580178617908bdb727d36710211b6eae..0a55bdbcd127cd46ad3d20d590a8799d6836c551 100644 --- a/lib/datasource/nuget/v2.ts +++ b/lib/datasource/nuget/v2.ts @@ -4,6 +4,10 @@ import { logger } from '../../logger'; import got from '../../util/got'; import { ReleaseResult } from '../common'; +function getPkgProp(pkgInfo: XmlElement, propName: string) { + return pkgInfo.childNamed('m:properties').childNamed(`d:${propName}`).val; +} + export async function getPkgReleases( feedUrl: string, pkgName: string @@ -73,7 +77,3 @@ export async function getPkgReleases( return null; } } - -function getPkgProp(pkgInfo: XmlElement, propName: string) { - return pkgInfo.childNamed('m:properties').childNamed(`d:${propName}`).val; -} diff --git a/lib/datasource/pypi/index.ts b/lib/datasource/pypi/index.ts index f4088786b14b3e090d34a688feb49955998e923f..ea8a9e58b96302aec9417cf62a6adaa974988180 100644 --- a/lib/datasource/pypi/index.ts +++ b/lib/datasource/pypi/index.ts @@ -28,33 +28,6 @@ function compatibleVersions( ); } -export async function getPkgReleases({ - compatibility, - lookupName, - registryUrls, -}: PkgReleaseConfig): Promise<ReleaseResult | null> { - let hostUrls = ['https://pypi.org/pypi/']; - if (is.nonEmptyArray(registryUrls)) { - hostUrls = registryUrls; - } - if (process.env.PIP_INDEX_URL) { - hostUrls = [process.env.PIP_INDEX_URL]; - } - for (let hostUrl of hostUrls) { - hostUrl += hostUrl.endsWith('/') ? 
'' : '/'; - let dep: ReleaseResult; - if (hostUrl.endsWith('/simple/') || hostUrl.endsWith('/+simple/')) { - dep = await getSimpleDependency(lookupName, hostUrl); - } else { - dep = await getDependency(lookupName, hostUrl, compatibility); - } - if (dep !== null) { - return dep; - } - } - return null; -} - async function getDependency( depName: string, hostUrl: string, @@ -108,6 +81,18 @@ async function getDependency( } } +function extractVersionFromLinkText( + text: string, + depName: string +): string | null { + const prefix = `${depName}-`; + const suffix = '.tar.gz'; + if (!(text.startsWith(prefix) && text.endsWith(suffix))) { + return null; + } + return text.replace(prefix, '').replace(/\.tar\.gz$/, ''); +} + async function getSimpleDependency( depName: string, hostUrl: string @@ -147,14 +132,29 @@ async function getSimpleDependency( } } -function extractVersionFromLinkText( - text: string, - depName: string -): string | null { - const prefix = `${depName}-`; - const suffix = '.tar.gz'; - if (!(text.startsWith(prefix) && text.endsWith(suffix))) { - return null; +export async function getPkgReleases({ + compatibility, + lookupName, + registryUrls, +}: PkgReleaseConfig): Promise<ReleaseResult | null> { + let hostUrls = ['https://pypi.org/pypi/']; + if (is.nonEmptyArray(registryUrls)) { + hostUrls = registryUrls; } - return text.replace(prefix, '').replace(/\.tar\.gz$/, ''); + if (process.env.PIP_INDEX_URL) { + hostUrls = [process.env.PIP_INDEX_URL]; + } + for (let hostUrl of hostUrls) { + hostUrl += hostUrl.endsWith('/') ? '' : '/'; + let dep: ReleaseResult; + if (hostUrl.endsWith('/simple/') || hostUrl.endsWith('/+simple/')) { + dep = await getSimpleDependency(lookupName, hostUrl); + } else { + dep = await getDependency(lookupName, hostUrl, compatibility); + } + if (dep !== null) { + return dep; + } + } + return null; } diff --git a/lib/datasource/sbt/index.ts b/lib/datasource/sbt/index.ts index 1e346b41ffbd2397414f8d4590e91ed737c2a77a..e0c8b18e7eab7b6f0c91f84e4ab94bb5750e5dc1 100644 --- a/lib/datasource/sbt/index.ts +++ b/lib/datasource/sbt/index.ts @@ -4,6 +4,87 @@ import { parseIndexDir, SBT_PLUGINS_REPO } from './util'; import { logger } from '../../logger'; import { PkgReleaseConfig, ReleaseResult } from '../common'; +async function resolvePackageReleases( + searchRoot: string, + artifact: string, + scalaVersion: string +): Promise<string[]> { + const indexContent = await downloadHttpProtocol(searchRoot, 'sbt'); + if (indexContent) { + const releases: string[] = []; + const parseSubdirs = (content: string) => + parseIndexDir(content, x => { + if (x === artifact) return true; + if (x.indexOf(`${artifact}_native`) === 0) return false; + if (x.indexOf(`${artifact}_sjs`) === 0) return false; + return x.indexOf(`${artifact}_`) === 0; + }); + const artifactSubdirs = parseSubdirs(indexContent); + let searchSubdirs = artifactSubdirs; + if ( + scalaVersion && + artifactSubdirs.indexOf(`${artifact}_${scalaVersion}`) !== -1 + ) { + searchSubdirs = [`${artifact}_${scalaVersion}`]; + } + const parseReleases = (content: string) => + parseIndexDir(content, x => !/^\.+$/.test(x)); + for (const searchSubdir of searchSubdirs) { + const content = await downloadHttpProtocol( + `${searchRoot}/${searchSubdir}`, + 'sbt' + ); + if (content) { + const subdirReleases = parseReleases(content); + subdirReleases.forEach(x => releases.push(x)); + } + } + if (releases.length) return [...new Set(releases)].sort(compare); + } + + return null; +} + +async function resolvePluginReleases( + rootUrl: string, + 
artifact: string, + scalaVersion: string +) { + const searchRoot = `${rootUrl}/${artifact}`; + const parse = (content: string) => + parseIndexDir(content, x => !/^\.+$/.test(x)); + const indexContent = await downloadHttpProtocol(searchRoot, 'sbt'); + if (indexContent) { + const releases: string[] = []; + const scalaVersionItems = parse(indexContent); + const scalaVersions = scalaVersionItems.map(x => x.replace(/^scala_/, '')); + const searchVersions = + scalaVersions.indexOf(scalaVersion) === -1 + ? scalaVersions + : [scalaVersion]; + for (const searchVersion of searchVersions) { + const searchSubRoot = `${searchRoot}/scala_${searchVersion}`; + const subRootContent = await downloadHttpProtocol(searchSubRoot, 'sbt'); + if (subRootContent) { + const sbtVersionItems = parse(subRootContent); + for (const sbtItem of sbtVersionItems) { + const releasesRoot = `${searchSubRoot}/${sbtItem}`; + const releasesIndexContent = await downloadHttpProtocol( + releasesRoot, + 'sbt' + ); + if (releasesIndexContent) { + const releasesParsed = parse(releasesIndexContent); + releasesParsed.forEach(x => releases.push(x)); + } + } + } + } + if (releases.length) return [...new Set(releases)].sort(compare); + } + return resolvePackageReleases(rootUrl, artifact, scalaVersion); +} + export async function getPkgReleases( config: PkgReleaseConfig ): Promise<ReleaseResult | null> { @@ -58,84 +139,3 @@ export async function getPkgReleases( ); return null; } - -async function resolvePluginReleases( - rootUrl: string, - artifact: string, - scalaVersion: string -) { - const searchRoot = `${rootUrl}/${artifact}`; - const parse = (content: string) => - parseIndexDir(content, x => !/^\.+$/.test(x)); - const indexContent = await downloadHttpProtocol(searchRoot, 'sbt'); - if (indexContent) { - const releases: string[] = []; - const scalaVersionItems = parse(indexContent); - const scalaVersions = scalaVersionItems.map(x => x.replace(/^scala_/, '')); - const searchVersions = - scalaVersions.indexOf(scalaVersion) === -1 - ? 
scalaVersions - : [scalaVersion]; - for (const searchVersion of searchVersions) { - const searchSubRoot = `${searchRoot}/scala_${searchVersion}`; - const subRootContent = await downloadHttpProtocol(searchSubRoot, 'sbt'); - if (subRootContent) { - const sbtVersionItems = parse(subRootContent); - for (const sbtItem of sbtVersionItems) { - const releasesRoot = `${searchSubRoot}/${sbtItem}`; - const releasesIndexContent = await downloadHttpProtocol( - releasesRoot, - 'sbt' - ); - if (releasesIndexContent) { - const releasesParsed = parse(releasesIndexContent); - releasesParsed.forEach(x => releases.push(x)); - } - } - } - } - if (releases.length) return [...new Set(releases)].sort(compare); - } - return resolvePackageReleases(rootUrl, artifact, scalaVersion); -} - -async function resolvePackageReleases( - searchRoot: string, - artifact: string, - scalaVersion: string -): Promise<string[]> { - const indexContent = await downloadHttpProtocol(searchRoot, 'sbt'); - if (indexContent) { - const releases: string[] = []; - const parseSubdirs = (content: string) => - parseIndexDir(content, x => { - if (x === artifact) return true; - if (x.indexOf(`${artifact}_native`) === 0) return false; - if (x.indexOf(`${artifact}_sjs`) === 0) return false; - return x.indexOf(`${artifact}_`) === 0; - }); - const artifactSubdirs = parseSubdirs(indexContent); - let searchSubdirs = artifactSubdirs; - if ( - scalaVersion && - artifactSubdirs.indexOf(`${artifact}_${scalaVersion}`) !== -1 - ) { - searchSubdirs = [`${artifact}_${scalaVersion}`]; - } - const parseReleases = (content: string) => - parseIndexDir(content, x => !/^\.+$/.test(x)); - for (const searchSubdir of searchSubdirs) { - const content = await downloadHttpProtocol( - `${searchRoot}/${searchSubdir}`, - 'sbt' - ); - if (content) { - const subdirReleases = parseReleases(content); - subdirReleases.forEach(x => releases.push(x)); - } - } - if (releases.length) return [...new Set(releases)].sort(compare); - } - - return null; -} diff --git a/lib/platform/azure/azure-helper.ts b/lib/platform/azure/azure-helper.ts index 05f3e2ba825a16ace54e2233c0b9ef7bfa821794..57f956aaf2b277fe64a994f2f76ae047b29398c8 100644 --- a/lib/platform/azure/azure-helper.ts +++ b/lib/platform/azure/azure-helper.ts @@ -71,33 +71,18 @@ export async function getAzureBranchObj( }; } -export async function getChanges( - files: { name: string; contents: any }[], - repoId: string, - branchName: string -) { - const changes = []; - for (const file of files) { - // Add or update - let changeType = 1; - const fileAlreadyThere = await getFile(repoId, file.name, branchName); - if (fileAlreadyThere) { - changeType = 2; - } - - changes.push({ - changeType, - item: { - path: file.name, - }, - newContent: { - Content: file.contents, - ContentType: 0, // RawText - }, +async function streamToString(stream: NodeJS.ReadableStream) { + const chunks: string[] = []; + /* eslint-disable promise/avoid-new */ + const p = await new Promise<string>(resolve => { + stream.on('data', (chunk: any) => { + chunks.push(chunk.toString()); }); - } - - return changes; + stream.on('end', () => { + resolve(chunks.join('')); + }); + }); + return p; } // if no branchName, look globaly @@ -144,18 +129,33 @@ export async function getFile( return null; // no file found } -async function streamToString(stream: NodeJS.ReadableStream) { - const chunks: string[] = []; - /* eslint-disable promise/avoid-new */ - const p = await new Promise<string>(resolve => { - stream.on('data', (chunk: any) => { - chunks.push(chunk.toString()); - }); - 
stream.on('end', () => { - resolve(chunks.join('')); +export async function getChanges( + files: { name: string; contents: any }[], + repoId: string, + branchName: string +) { + const changes = []; + for (const file of files) { + // Add or update + let changeType = 1; + const fileAlreadyThere = await getFile(repoId, file.name, branchName); + if (fileAlreadyThere) { + changeType = 2; + } + + changes.push({ + changeType, + item: { + path: file.name, + }, + newContent: { + Content: file.contents, + ContentType: 0, // RawText + }, }); - }); - return p; + } + + return changes; } export function max4000Chars(str: string) { diff --git a/lib/platform/azure/index.ts b/lib/platform/azure/index.ts index 367cb03d2bf5d556ac087c5173ec974ecb0c5242..1f3ff34a643a5878038837881e3d9ca4962ae1b8 100644 --- a/lib/platform/azure/index.ts +++ b/lib/platform/azure/index.ts @@ -63,6 +63,15 @@ export async function getRepos() { return repos.map(repo => `${repo.project!.name}/${repo.name}`); } +async function getBranchCommit(fullBranchName: string) { + const azureApiGit = await azureApi.gitApi(); + const commit = await azureApiGit.getBranch( + config.repoId, + azureHelper.getBranchNameWithoutRefsheadsPrefix(fullBranchName)! + ); + return commit.commit!.commitId; +} + export async function initRepo({ repository, localDir, @@ -136,6 +145,12 @@ export function getRepoForceRebase() { return false; } +// Search + +export /* istanbul ignore next */ function getFileList(branchName: string) { + return config.storage.getFileList(branchName); +} + export /* istanbul ignore next */ async function setBaseBranch( branchName = config.baseBranch ) { @@ -153,12 +168,6 @@ export /* istanbul ignore next */ function setBranchPrefix( return config.storage.setBranchPrefix(branchPrefix); } -// Search - -export /* istanbul ignore next */ function getFileList(branchName: string) { - return config.storage.getFileList(branchName); -} - // Branch export /* istanbul ignore next */ function branchExists(branchName: string) { @@ -182,60 +191,40 @@ export /* istanbul ignore next */ function getFile( return config.storage.getFile(filePath, branchName); } -export /* istanbul ignore next */ async function deleteBranch( - branchName: string, - abandonAssociatedPr = false -) { - await config.storage.deleteBranch(branchName); - if (abandonAssociatedPr) { - const pr = await getBranchPr(branchName); - await abandonPr(pr.number); - } -} - -export /* istanbul ignore next */ function getBranchLastCommitTime( - branchName: string -) { - return config.storage.getBranchLastCommitTime(branchName); -} - -export /* istanbul ignore next */ function getRepoStatus() { - return config.storage.getRepoStatus(); -} - -export /* istanbul ignore next */ function mergeBranch(branchName: string) { - return config.storage.mergeBranch(branchName); -} - -export /* istanbul ignore next */ function commitFilesToBranch( - branchName: string, - files: any[], - message: string, - parentBranch = config.baseBranch -) { - return config.storage.commitFilesToBranch( - branchName, - files, - message, - parentBranch +// istanbul ignore next +async function abandonPr(prNo: number) { + logger.debug(`abandonPr(prNo)(${prNo})`); + const azureApiGit = await azureApi.gitApi(); + await azureApiGit.updatePullRequest( + { + status: 2, + }, + config.repoId, + prNo ); } -export /* istanbul ignore next */ function getCommitMessages() { - return config.storage.getCommitMessages(); -} - -async function getBranchCommit(fullBranchName: string) { +export async function getPr(pullRequestId: number) { + 
logger.debug(`getPr(${pullRequestId})`); + if (!pullRequestId) { + return null; + } const azureApiGit = await azureApi.gitApi(); - const commit = await azureApiGit.getBranch( + const prs = await azureApiGit.getPullRequests(config.repoId, { status: 4 }); + const azurePr: any = prs.find(item => item.pullRequestId === pullRequestId); + if (!azurePr) { + return null; + } + const labels = await azureApiGit.getPullRequestLabels( config.repoId, - azureHelper.getBranchNameWithoutRefsheadsPrefix(fullBranchName)! + pullRequestId ); - return commit.commit!.commitId; -} - -export function getPrList() { - return []; + azurePr.labels = labels + .filter(label => label.active) + .map(label => label.name); + logger.debug(`pr: (${azurePr})`); + const pr = azureHelper.getRenovatePRFormat(azurePr); + return pr; } export async function findPr( @@ -288,22 +277,51 @@ export async function getBranchPr(branchName: string) { return existingPr ? getPr(existingPr.pullRequestId) : null; } -export async function getBranchStatus( +export /* istanbul ignore next */ async function deleteBranch( branchName: string, - requiredStatusChecks: any + abandonAssociatedPr = false ) { - logger.debug(`getBranchStatus(${branchName})`); - if (!requiredStatusChecks) { - // null means disable status checks, so it always succeeds - return 'success'; - } - if (requiredStatusChecks.length) { - // This is Unsupported - logger.warn({ requiredStatusChecks }, `Unsupported requiredStatusChecks`); - return 'failed'; + await config.storage.deleteBranch(branchName); + if (abandonAssociatedPr) { + const pr = await getBranchPr(branchName); + await abandonPr(pr.number); } - const branchStatusCheck = await getBranchStatusCheck(branchName); - return branchStatusCheck; +} + +export /* istanbul ignore next */ function getBranchLastCommitTime( + branchName: string +) { + return config.storage.getBranchLastCommitTime(branchName); +} + +export /* istanbul ignore next */ function getRepoStatus() { + return config.storage.getRepoStatus(); +} + +export /* istanbul ignore next */ function mergeBranch(branchName: string) { + return config.storage.mergeBranch(branchName); +} + +export /* istanbul ignore next */ function commitFilesToBranch( + branchName: string, + files: any[], + message: string, + parentBranch = config.baseBranch +) { + return config.storage.commitFilesToBranch( + branchName, + files, + message, + parentBranch + ); +} + +export /* istanbul ignore next */ function getCommitMessages() { + return config.storage.getCommitMessages(); +} + +export function getPrList() { + return []; } export async function getBranchStatusCheck( @@ -322,27 +340,22 @@ export async function getBranchStatusCheck( return 'pending'; } -export async function getPr(pullRequestId: number) { - logger.debug(`getPr(${pullRequestId})`); - if (!pullRequestId) { - return null; +export async function getBranchStatus( + branchName: string, + requiredStatusChecks: any +) { + logger.debug(`getBranchStatus(${branchName})`); + if (!requiredStatusChecks) { + // null means disable status checks, so it always succeeds + return 'success'; } - const azureApiGit = await azureApi.gitApi(); - const prs = await azureApiGit.getPullRequests(config.repoId, { status: 4 }); - const azurePr: any = prs.find(item => item.pullRequestId === pullRequestId); - if (!azurePr) { - return null; + if (requiredStatusChecks.length) { + // This is Unsupported + logger.warn({ requiredStatusChecks }, `Unsupported requiredStatusChecks`); + return 'failed'; } - const labels = await azureApiGit.getPullRequestLabels( - 
config.repoId, - pullRequestId - ); - azurePr.labels = labels - .filter(label => label.active) - .map(label => label.name); - logger.debug(`pr: (${azurePr})`); - const pr = azureHelper.getRenovatePRFormat(azurePr); - return pr; + const branchStatusCheck = await getBranchStatusCheck(branchName); + return branchStatusCheck; } export async function createPr( @@ -458,19 +471,6 @@ export async function ensureCommentRemoval(issueNo: number, topic: string) { } } -// istanbul ignore next -async function abandonPr(prNo: number) { - logger.debug(`abandonPr(prNo)(${prNo})`); - const azureApiGit = await azureApi.gitApi(); - await azureApiGit.updatePullRequest( - { - status: 2, - }, - config.repoId, - prNo - ); -} - export function setBranchStatus( branchName: string, context: string, diff --git a/lib/platform/bitbucket-server/index.ts b/lib/platform/bitbucket-server/index.ts index 56901a18c2a848ff29a342de3654ee8aa1a6433e..5d8cd1efa3d124f96cf03d26a812c2f156dce820 100644 --- a/lib/platform/bitbucket-server/index.ts +++ b/lib/platform/bitbucket-server/index.ts @@ -244,6 +244,139 @@ export function branchExists(branchName: string) { return config.storage.branchExists(branchName); } +export function isBranchStale(branchName: string) { + logger.debug(`isBranchStale(${branchName})`); + return config.storage.isBranchStale(branchName); +} + +// Gets details for a PR +export async function getPr(prNo: number, refreshCache?: boolean) { + logger.debug(`getPr(${prNo})`); + if (!prNo) { + return null; + } + + const res = await api.get( + `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}`, + { useCache: !refreshCache } + ); + + const pr: any = { + displayNumber: `Pull Request #${res.body.id}`, + ...utils.prInfo(res.body), + reviewers: res.body.reviewers.map( + (r: { user: { name: any } }) => r.user.name + ), + isModified: false, + }; + + pr.version = updatePrVersion(pr.number, pr.version); + + if (pr.state === 'open') { + const mergeRes = await api.get( + `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}/merge`, + { useCache: !refreshCache } + ); + pr.isConflicted = !!mergeRes.body.conflicted; + pr.canMerge = !!mergeRes.body.canMerge; + + const prCommits = (await api.get( + `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}/commits?withCounts=true`, + { useCache: !refreshCache } + )).body; + + if (prCommits.totalCount === 1) { + if (global.gitAuthor) { + const commitAuthorEmail = prCommits.values[0].author.emailAddress; + if (commitAuthorEmail !== global.gitAuthor.email) { + logger.debug( + { prNo }, + 'PR is modified: 1 commit but not by configured gitAuthor' + ); + pr.isModified = true; + } + } + } else { + logger.debug( + { prNo }, + `PR is modified: Found ${prCommits.totalCount} commits` + ); + pr.isModified = true; + } + } + + if (await branchExists(pr.branchName)) { + pr.isStale = await isBranchStale(pr.branchName); + } + + return pr; +} + +// TODO: coverage +// istanbul ignore next +function matchesState(state: string, desiredState: string) { + if (desiredState === 'all') { + return true; + } + if (desiredState[0] === '!') { + return state !== desiredState.substring(1); + } + return state === desiredState; +} + +// TODO: coverage +// istanbul ignore next +const isRelevantPr = ( + branchName: string, + prTitle: string | null | undefined, + state: string +) => (p: { branchName: string; title: string; state: string }) => + p.branchName === branchName && + (!prTitle || 
p.title === prTitle) && + matchesState(p.state, state); + +// TODO: coverage +// eslint-disable-next-line @typescript-eslint/no-unused-vars +export async function getPrList(_args?: any) { + logger.debug(`getPrList()`); + // istanbul ignore next + if (!config.prList) { + const query = new URLSearchParams({ + state: 'ALL', + 'role.1': 'AUTHOR', + 'username.1': config.username, + }).toString(); + const values = await utils.accumulateValues( + `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests?${query}` + ); + + config.prList = values.map(utils.prInfo); + logger.info({ length: config.prList.length }, 'Retrieved Pull Requests'); + } else { + logger.debug('returning cached PR list'); + } + return config.prList; +} + +// TODO: coverage +// istanbul ignore next +export async function findPr( + branchName: string, + prTitle?: string, + state = 'all', + refreshCache?: boolean +) { + logger.debug(`findPr(${branchName}, "${prTitle}", "${state}")`); + const prList = await getPrList({ refreshCache }); + const pr = prList.find(isRelevantPr(branchName, prTitle, state)); + if (pr) { + logger.debug(`Found PR #${pr.number}`); + } else { + logger.debug(`DID NOT Found PR from branch #${branchName}`); + } + return pr; +} + // Returns the Pull Request for a branch. Null if not exists. export async function getBranchPr(branchName: string, refreshCache?: boolean) { logger.debug(`getBranchPr(${branchName})`); @@ -256,11 +389,6 @@ export function getAllRenovateBranches(branchPrefix: string) { return config.storage.getAllRenovateBranches(branchPrefix); } -export function isBranchStale(branchName: string) { - logger.debug(`isBranchStale(${branchName})`); - return config.storage.isBranchStale(branchName); -} - export async function commitFilesToBranch( branchName: string, files: any[], @@ -656,71 +784,6 @@ export async function ensureCommentRemoval(prNo: number, topic: string) { } } -// TODO: coverage -// eslint-disable-next-line @typescript-eslint/no-unused-vars -export async function getPrList(_args?: any) { - logger.debug(`getPrList()`); - // istanbul ignore next - if (!config.prList) { - const query = new URLSearchParams({ - state: 'ALL', - 'role.1': 'AUTHOR', - 'username.1': config.username, - }).toString(); - const values = await utils.accumulateValues( - `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests?${query}` - ); - - config.prList = values.map(utils.prInfo); - logger.info({ length: config.prList.length }, 'Retrieved Pull Requests'); - } else { - logger.debug('returning cached PR list'); - } - return config.prList; -} - -// TODO: coverage -// istanbul ignore next -function matchesState(state: string, desiredState: string) { - if (desiredState === 'all') { - return true; - } - if (desiredState[0] === '!') { - return state !== desiredState.substring(1); - } - return state === desiredState; -} - -// TODO: coverage -// istanbul ignore next -const isRelevantPr = ( - branchName: string, - prTitle: string | null | undefined, - state: string -) => (p: { branchName: string; title: string; state: string }) => - p.branchName === branchName && - (!prTitle || p.title === prTitle) && - matchesState(p.state, state); - -// TODO: coverage -// istanbul ignore next -export async function findPr( - branchName: string, - prTitle?: string, - state = 'all', - refreshCache?: boolean -) { - logger.debug(`findPr(${branchName}, "${prTitle}", "${state}")`); - const prList = await getPrList({ refreshCache }); - const pr = 
prList.find(isRelevantPr(branchName, prTitle, state)); - if (pr) { - logger.debug(`Found PR #${pr.number}`); - } else { - logger.debug(`DID NOT Found PR from branch #${branchName}`); - } - return pr; -} - // Pull Request const escapeHash = input => (input ? input.replace(/#/g, '%23') : input); @@ -808,69 +871,6 @@ export async function createPr( return pr; } -// Gets details for a PR -export async function getPr(prNo: number, refreshCache?: boolean) { - logger.debug(`getPr(${prNo})`); - if (!prNo) { - return null; - } - - const res = await api.get( - `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}`, - { useCache: !refreshCache } - ); - - const pr: any = { - displayNumber: `Pull Request #${res.body.id}`, - ...utils.prInfo(res.body), - reviewers: res.body.reviewers.map( - (r: { user: { name: any } }) => r.user.name - ), - isModified: false, - }; - - pr.version = updatePrVersion(pr.number, pr.version); - - if (pr.state === 'open') { - const mergeRes = await api.get( - `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}/merge`, - { useCache: !refreshCache } - ); - pr.isConflicted = !!mergeRes.body.conflicted; - pr.canMerge = !!mergeRes.body.canMerge; - - const prCommits = (await api.get( - `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}/commits?withCounts=true`, - { useCache: !refreshCache } - )).body; - - if (prCommits.totalCount === 1) { - if (global.gitAuthor) { - const commitAuthorEmail = prCommits.values[0].author.emailAddress; - if (commitAuthorEmail !== global.gitAuthor.email) { - logger.debug( - { prNo }, - 'PR is modified: 1 commit but not by configured gitAuthor' - ); - pr.isModified = true; - } - } - } else { - logger.debug( - { prNo }, - `PR is modified: Found ${prCommits.totalCount} commits` - ); - pr.isModified = true; - } - } - - if (await branchExists(pr.branchName)) { - pr.isStale = await isBranchStale(pr.branchName); - } - - return pr; -} - // Return a list of all modified files in a PR // https://docs.atlassian.com/bitbucket-server/rest/6.0.0/bitbucket-rest.html export async function getPrFiles(prNo: number) { diff --git a/lib/platform/bitbucket/index.ts b/lib/platform/bitbucket/index.ts index 0044ea30642c2063ff44efaf4a15dc32440dece7..1b484c6904f5129199b5d5a98498e05f88087548 100644 --- a/lib/platform/bitbucket/index.ts +++ b/lib/platform/bitbucket/index.ts @@ -138,6 +138,13 @@ export function getRepoForceRebase() { return false; } +// Search + +// Get full file list +export function getFileList(branchName?: string) { + return config.storage.getFileList(branchName); +} + export async function setBaseBranch(branchName = config.baseBranch) { logger.debug(`Setting baseBranch to ${branchName}`); config.baseBranch = branchName; @@ -153,13 +160,6 @@ export /* istanbul ignore next */ function setBranchPrefix( return config.storage.setBranchPrefix(branchPrefix); } -// Search - -// Get full file list -export function getFileList(branchName?: string) { - return config.storage.getFileList(branchName); -} - // Branch // Returns true if branch exists, otherwise false @@ -179,6 +179,49 @@ export function getFile(filePath: string, branchName?: string) { return config.storage.getFile(filePath, branchName); } +// istanbul ignore next +function matchesState(state: string, desiredState: string) { + if (desiredState === 'all') { + return true; + } + if (desiredState[0] === '!') { + return state !== desiredState.substring(1); + } + return state === 
desiredState; +} + +export async function getPrList() { + logger.debug('getPrList()'); + if (!config.prList) { + logger.debug('Retrieving PR list'); + let url = `/2.0/repositories/${config.repository}/pullrequests?`; + url += utils.prStates.all.map(state => 'state=' + state).join('&'); + const prs = await utils.accumulateValues(url, undefined, undefined, 50); + config.prList = prs.map(utils.prInfo); + logger.info({ length: config.prList.length }, 'Retrieved Pull Requests'); + } + return config.prList; +} + +export async function findPr( + branchName: string, + prTitle?: string | null, + state = 'all' +) { + logger.debug(`findPr(${branchName}, ${prTitle}, ${state})`); + const prList = await getPrList(); + const pr = prList.find( + (p: { branchName: string; title: string; state: string }) => + p.branchName === branchName && + (!prTitle || p.title === prTitle) && + matchesState(p.state, state) + ); + if (pr) { + logger.debug(`Found PR #${pr.number}`); + } + return pr; +} + export async function deleteBranch(branchName: string, closePr?: boolean) { if (closePr) { const pr = await findPr(branchName, null, 'open'); @@ -222,6 +265,88 @@ export function getCommitMessages() { return config.storage.getCommitMessages(); } +async function isPrConflicted(prNo: number) { + const diff = (await api.get( + `/2.0/repositories/${config.repository}/pullrequests/${prNo}/diff`, + { json: false } as any + )).body; + + return utils.isConflicted(parseDiff(diff)); +} + +// Gets details for a PR +export async function getPr(prNo: number) { + const pr = (await api.get( + `/2.0/repositories/${config.repository}/pullrequests/${prNo}` + )).body; + + // istanbul ignore if + if (!pr) { + return null; + } + + const res: any = { + displayNumber: `Pull Request #${pr.id}`, + ...utils.prInfo(pr), + isModified: false, + }; + + if (utils.prStates.open.includes(pr.state)) { + res.isConflicted = await isPrConflicted(prNo); + + // TODO: Is that correct? Should we check getBranchStatus like gitlab? + res.canMerge = !res.isConflicted; + + // we only want the first two commits, because size tells us the overall number + const url = pr.links.commits.href + '?pagelen=2'; + const { body } = await api.get<utils.PagedResult<Commit>>(url); + const size = body.size || body.values.length; + + // istanbul ignore if + if (size === undefined) { + logger.warn({ prNo, url, body }, 'invalid response so can rebase'); + } else if (size === 1) { + if (global.gitAuthor) { + const author = addrs.parseOneAddress( + body.values[0].author.raw + ) as addrs.ParsedMailbox; + if (author.address !== global.gitAuthor.email) { + logger.debug( + { prNo }, + 'PR is modified: 1 commit but not by configured gitAuthor' + ); + res.isModified = true; + } + } + } else { + logger.debug({ prNo }, `PR is modified: Found ${size} commits`); + res.isModified = true; + } + } + if (await branchExists(pr.source.branch.name)) { + res.isStale = await isBranchStale(pr.source.branch.name); + } + + return res; +} + +const escapeHash = input => (input ? input.replace(/#/g, '%23') : input); + +// Return the commit SHA for a branch +async function getBranchCommit(branchName: string) { + try { + const branch = (await api.get( + `/2.0/repositories/${config.repository}/refs/branches/${escapeHash( + branchName + )}` + )).body; + return branch.target.hash; + } catch (err) /* istanbul ignore next */ { + logger.debug({ err }, `getBranchCommit('${branchName}') failed'`); + return null; + } +} + // Returns the Pull Request for a branch. Null if not exists. 
export async function getBranchPr(branchName: string) { logger.debug(`getBranchPr(${branchName})`); @@ -365,6 +490,15 @@ async function closeIssue(issueNumber: number) { ); } +export function getPrBody(input: string) { + // Remove any HTML we use + return smartTruncate(input, 50000) + .replace(/<\/?summary>/g, '**') + .replace(/<\/?details>/g, '') + .replace(new RegExp(`\n---\n\n.*?<!-- ${appSlug}-rebase -->.*?\n`), '') + .replace(/\]\(\.\.\/pull\//g, '](../../pull-requests/'); +} + export async function ensureIssue(title: string, body: string) { logger.debug(`ensureIssue()`); const description = getPrBody(sanitize(body)); @@ -497,36 +631,6 @@ export function ensureCommentRemoval(prNo: number, topic: string) { return comments.ensureCommentRemoval(config, prNo, topic); } -// istanbul ignore next -function matchesState(state: string, desiredState: string) { - if (desiredState === 'all') { - return true; - } - if (desiredState[0] === '!') { - return state !== desiredState.substring(1); - } - return state === desiredState; -} - -export async function findPr( - branchName: string, - prTitle?: string | null, - state = 'all' -) { - logger.debug(`findPr(${branchName}, ${prTitle}, ${state})`); - const prList = await getPrList(); - const pr = prList.find( - (p: { branchName: string; title: string; state: string }) => - p.branchName === branchName && - (!prTitle || p.title === prTitle) && - matchesState(p.state, state) - ); - if (pr) { - logger.debug(`Found PR #${pr.number}`); - } - return pr; -} - // Creates PR and returns PR number export async function createPr( branchName: string, @@ -587,15 +691,6 @@ export async function createPr( return pr; } -async function isPrConflicted(prNo: number) { - const diff = (await api.get( - `/2.0/repositories/${config.repository}/pullrequests/${prNo}/diff`, - { json: false } as any - )).body; - - return utils.isConflicted(parseDiff(diff)); -} - interface Reviewer { uuid: { raw: string }; } @@ -603,61 +698,6 @@ interface Reviewer { interface Commit { author: { raw: string }; } -// Gets details for a PR -export async function getPr(prNo: number) { - const pr = (await api.get( - `/2.0/repositories/${config.repository}/pullrequests/${prNo}` - )).body; - - // istanbul ignore if - if (!pr) { - return null; - } - - const res: any = { - displayNumber: `Pull Request #${pr.id}`, - ...utils.prInfo(pr), - isModified: false, - }; - - if (utils.prStates.open.includes(pr.state)) { - res.isConflicted = await isPrConflicted(prNo); - - // TODO: Is that correct? Should we check getBranchStatus like gitlab? 
- res.canMerge = !res.isConflicted; - - // we only want the first two commits, because size tells us the overall number - const url = pr.links.commits.href + '?pagelen=2'; - const { body } = await api.get<utils.PagedResult<Commit>>(url); - const size = body.size || body.values.length; - - // istanbul ignore if - if (size === undefined) { - logger.warn({ prNo, url, body }, 'invalid response so can rebase'); - } else if (size === 1) { - if (global.gitAuthor) { - const author = addrs.parseOneAddress( - body.values[0].author.raw - ) as addrs.ParsedMailbox; - if (author.address !== global.gitAuthor.email) { - logger.debug( - { prNo }, - 'PR is modified: 1 commit but not by configured gitAuthor' - ); - res.isModified = true; - } - } - } else { - logger.debug({ prNo }, `PR is modified: Found ${size} commits`); - res.isModified = true; - } - } - if (await branchExists(pr.source.branch.name)) { - res.isStale = await isBranchStale(pr.source.branch.name); - } - - return res; -} // Return a list of all modified files in a PR export async function getPrFiles(prNo: number) { @@ -703,47 +743,8 @@ export async function mergePr(prNo: number, branchName: string) { return true; } -export function getPrBody(input: string) { - // Remove any HTML we use - return smartTruncate(input, 50000) - .replace(/<\/?summary>/g, '**') - .replace(/<\/?details>/g, '') - .replace(new RegExp(`\n---\n\n.*?<!-- ${appSlug}-rebase -->.*?\n`), '') - .replace(/\]\(\.\.\/pull\//g, '](../../pull-requests/'); -} - -const escapeHash = input => (input ? input.replace(/#/g, '%23') : input); - -// Return the commit SHA for a branch -async function getBranchCommit(branchName: string) { - try { - const branch = (await api.get( - `/2.0/repositories/${config.repository}/refs/branches/${escapeHash( - branchName - )}` - )).body; - return branch.target.hash; - } catch (err) /* istanbul ignore next */ { - logger.debug({ err }, `getBranchCommit('${branchName}') failed'`); - return null; - } -} - // Pull Request -export async function getPrList() { - logger.debug('getPrList()'); - if (!config.prList) { - logger.debug('Retrieving PR list'); - let url = `/2.0/repositories/${config.repository}/pullrequests?`; - url += utils.prStates.all.map(state => 'state=' + state).join('&'); - const prs = await utils.accumulateValues(url, undefined, undefined, 50); - config.prList = prs.map(utils.prInfo); - logger.info({ length: config.prList.length }, 'Retrieved Pull Requests'); - } - return config.prList; -} - export function cleanRepo() { // istanbul ignore if if (config.storage && config.storage.cleanRepo) { diff --git a/lib/platform/git/storage.ts b/lib/platform/git/storage.ts index 401b9c0efa196cdb3ab242f0f465ec877419d333..f18d14bc07e6051774bd1c34bbb5902c97fbcc09 100644 --- a/lib/platform/git/storage.ts +++ b/lib/platform/git/storage.ts @@ -25,6 +25,40 @@ interface LocalConfig extends StorageConfig { branchPrefix: string; } +// istanbul ignore next +function checkForPlatformFailure(err: Error) { + if (process.env.NODE_ENV === 'test') { + return; + } + const platformFailureStrings = [ + 'remote: Invalid username or password', + 'gnutls_handshake() failed', + 'The requested URL returned error: 5', + 'The remote end hung up unexpectedly', + 'access denied or repository not exported', + 'Could not write new index file', + 'Failed to connect to', + 'Connection timed out', + ]; + for (const errorStr of platformFailureStrings) { + if (err.message.includes(errorStr)) { + throw new Error('platform-failure'); + } + } +} + +function localName(branchName: string) { + 
return branchName.replace(/^origin\//, ''); +} + +function throwBaseBranchValidationError(branchName) { + const error = new Error('config-validation'); + error.validationError = 'baseBranch not found'; + error.validationMessage = + 'The following configured baseBranch could not be found: ' + branchName; + throw error; +} + export class Storage { private _config: LocalConfig = {} as any; @@ -489,38 +523,4 @@ export class Storage { } } -function localName(branchName: string) { - return branchName.replace(/^origin\//, ''); -} - -// istanbul ignore next -function checkForPlatformFailure(err: Error) { - if (process.env.NODE_ENV === 'test') { - return; - } - const platformFailureStrings = [ - 'remote: Invalid username or password', - 'gnutls_handshake() failed', - 'The requested URL returned error: 5', - 'The remote end hung up unexpectedly', - 'access denied or repository not exported', - 'Could not write new index file', - 'Failed to connect to', - 'Connection timed out', - ]; - for (const errorStr of platformFailureStrings) { - if (err.message.includes(errorStr)) { - throw new Error('platform-failure'); - } - } -} - -function throwBaseBranchValidationError(branchName) { - const error = new Error('config-validation'); - error.validationError = 'baseBranch not found'; - error.validationMessage = - 'The following configured baseBranch could not be found: ' + branchName; - throw error; -} - export default Storage; diff --git a/lib/platform/github/index.ts b/lib/platform/github/index.ts index 0bfc58a4acfe0333aac4bc30b5602ff3e6381582..cf9dfd8e4d808f7e3bc0307d7f119cb452926fb1 100644 --- a/lib/platform/github/index.ts +++ b/lib/platform/github/index.ts @@ -150,6 +150,43 @@ export function cleanRepo() { config = {} as any; } +async function getBranchProtection(branchName: string) { + // istanbul ignore if + if (config.parentRepo) { + return {}; + } + const res = await api.get( + `repos/${config.repository}/branches/${escapeHash(branchName)}/protection` + ); + return res.body; +} + +// Return the commit SHA for a branch +async function getBranchCommit(branchName: string) { + try { + const res = await api.get( + `repos/${config.repository}/git/refs/heads/${escapeHash(branchName)}` + ); + return res.body.object.sha; + } catch (err) /* istanbul ignore next */ { + logger.debug({ err }, 'Error getting branch commit'); + if (err.statusCode === 404) { + throw new Error('repository-changed'); + } + if (err.statusCode === 409) { + throw new Error('empty'); + } + throw err; + } +} + +async function getBaseCommitSHA() { + if (!config.baseCommitSHA) { + config.baseCommitSHA = await getBranchCommit(config.baseBranch); + } + return config.baseCommitSHA; +} + // Initialize GitHub by getting base branch and SHA export async function initRepo({ endpoint, @@ -429,43 +466,6 @@ export async function getRepoForceRebase() { return config.repoForceRebase; } -// Return the commit SHA for a branch -async function getBranchCommit(branchName: string) { - try { - const res = await api.get( - `repos/${config.repository}/git/refs/heads/${escapeHash(branchName)}` - ); - return res.body.object.sha; - } catch (err) /* istanbul ignore next */ { - logger.debug({ err }, 'Error getting branch commit'); - if (err.statusCode === 404) { - throw new Error('repository-changed'); - } - if (err.statusCode === 409) { - throw new Error('empty'); - } - throw err; - } -} - -async function getBaseCommitSHA() { - if (!config.baseCommitSHA) { - config.baseCommitSHA = await getBranchCommit(config.baseBranch); - } - return config.baseCommitSHA; -} - 
-async function getBranchProtection(branchName: string) { - // istanbul ignore if - if (config.parentRepo) { - return {}; - } - const res = await api.get( - `repos/${config.repository}/branches/${escapeHash(branchName)}/protection` - ); - return res.body; -} - // istanbul ignore next export async function setBaseBranch(branchName = config.baseBranch) { config.baseBranch = branchName; @@ -553,572 +553,359 @@ export function getCommitMessages() { return config.storage.getCommitMessages(); } -// Returns the Pull Request for a branch. Null if not exists. -export async function getBranchPr(branchName: string) { - logger.debug(`getBranchPr(${branchName})`); - const existingPr = await findPr(branchName, null, 'open'); - return existingPr ? getPr(existingPr.number) : null; -} - -// Returns the combined status for a branch. -export async function getBranchStatus( - branchName: string, - requiredStatusChecks: any -) { - logger.debug(`getBranchStatus(${branchName})`); - if (!requiredStatusChecks) { - // null means disable status checks, so it always succeeds - logger.debug('Status checks disabled = returning "success"'); - return 'success'; - } - if (requiredStatusChecks.length) { - // This is Unsupported - logger.warn({ requiredStatusChecks }, `Unsupported requiredStatusChecks`); - return 'failed'; - } - const commitStatusUrl = `repos/${config.repository}/commits/${escapeHash( - branchName - )}/status`; - let commitStatus; - try { - commitStatus = (await api.get(commitStatusUrl)).body; - } catch (err) /* istanbul ignore next */ { - if (err.statusCode === 404) { - logger.info( - 'Received 404 when checking branch status, assuming that branch has been deleted' - ); - throw new Error('repository-changed'); - } - logger.info('Unknown error when checking branch status'); - throw err; - } - logger.debug( - { state: commitStatus.state, statuses: commitStatus.statuses }, - 'branch status check result' - ); - let checkRuns: { name: string; status: string; conclusion: string }[] = []; - if (!config.isGhe) { +async function getClosedPrs() { + if (!config.closedPrList) { + config.closedPrList = {}; + let query; try { - const checkRunsUrl = `repos/${config.repository}/commits/${escapeHash( - branchName - )}/check-runs`; - const opts = { - headers: { - Accept: 'application/vnd.github.antiope-preview+json', - }, + const url = 'graphql'; + // prettier-ignore + query = ` + query { + repository(owner: "${config.repositoryOwner}", name: "${config.repositoryName}") { + pullRequests(states: [CLOSED, MERGED], first: 100, orderBy: {field: UPDATED_AT, direction: DESC}) { + nodes { + number + state + headRefName + title + comments(last: 100) { + nodes { + databaseId + body + } + } + } + } + } + } + `; + const options = { + body: JSON.stringify({ query }), + json: false, }; - const checkRunsRaw = (await api.get(checkRunsUrl, opts)).body; - if (checkRunsRaw.check_runs && checkRunsRaw.check_runs.length) { - checkRuns = checkRunsRaw.check_runs.map( - (run: { name: string; status: string; conclusion: string }) => ({ - name: run.name, - status: run.status, - conclusion: run.conclusion, + const res = JSON.parse((await api.post(url, options)).body); + const prNumbers: number[] = []; + // istanbul ignore if + if (!res.data) { + logger.info( + { query, res }, + 'No graphql res.data, returning empty list' + ); + return {}; + } + for (const pr of res.data.repository.pullRequests.nodes) { + // https://developer.github.com/v4/object/pullrequest/ + pr.displayNumber = `Pull Request #${pr.number}`; + pr.state = pr.state.toLowerCase(); + 
pr.branchName = pr.headRefName; + delete pr.headRefName; + pr.comments = pr.comments.nodes.map( + (comment: { databaseId: number; body: string }) => ({ + id: comment.databaseId, + body: comment.body, }) ); - logger.debug({ checkRuns }, 'check runs result'); - } else { - // istanbul ignore next - logger.debug({ result: checkRunsRaw }, 'No check runs found'); + pr.body = 'dummy body'; // just in case + config.closedPrList[pr.number] = pr; + prNumbers.push(pr.number); } + prNumbers.sort(); + logger.debug({ prNumbers }, 'Retrieved closed PR list with graphql'); } catch (err) /* istanbul ignore next */ { - if (err.message === 'platform-failure') { - throw err; - } - if ( - err.statusCode === 403 || - err.message === 'integration-unauthorized' - ) { - logger.info('No permission to view check runs'); - } else { - logger.warn({ err }, 'Error retrieving check runs'); - } - } - } - if (checkRuns.length === 0) { - return commitStatus.state; - } - if ( - commitStatus.state === 'failed' || - checkRuns.some(run => run.conclusion === 'failed') - ) { - return 'failed'; - } - if ( - (commitStatus.state === 'success' || commitStatus.statuses.length === 0) && - checkRuns.every(run => ['neutral', 'success'].includes(run.conclusion)) - ) { - return 'success'; - } - return 'pending'; -} - -export async function getBranchStatusCheck( - branchName: string, - context: string -) { - const branchCommit = await config.storage.getBranchCommit(branchName); - const url = `repos/${config.repository}/commits/${branchCommit}/statuses`; - try { - const res = await api.get(url); - for (const check of res.body) { - if (check.context === context) { - return check.state; - } - } - return null; - } catch (err) /* istanbul ignore next */ { - if (err.statusCode === 404) { - logger.info('Commit not found when checking statuses'); - throw new Error('repository-changed'); + logger.warn({ query, err }, 'getClosedPrs error'); } - throw err; } + return config.closedPrList; } -export async function setBranchStatus( - branchName: string, - context: string, - description: string, - state: string, - targetUrl?: string -) { +async function getOpenPrs() { // istanbul ignore if - if (config.parentRepo) { - logger.info('Cannot set branch status when in forking mode'); - return; - } - const existingStatus = await getBranchStatusCheck(branchName, context); - if (existingStatus === state) { - return; - } - logger.info({ branch: branchName, context, state }, 'Setting branch status'); - const branchCommit = await config.storage.getBranchCommit(branchName); - const url = `repos/${config.repository}/statuses/${branchCommit}`; - const options: any = { - state, - description, - context, - }; - if (targetUrl) { - options.target_url = targetUrl; + if (config.isGhe) { + logger.debug( + 'Skipping unsupported graphql PullRequests.mergeStateStatus query on GHE' + ); + return {}; } - await api.post(url, { body: options }); -} - -// Issue - -/* istanbul ignore next */ -async function getGraphqlIssues(afterCursor: string | null = null) { - const url = 'graphql'; - const headers = { - accept: 'application/vnd.github.merge-info-preview+json', - }; - // prettier-ignore - const query = ` - query { - repository(owner: "${config.repositoryOwner}", name: "${config.repositoryName}") { - issues(first: 100, after:${afterCursor}, orderBy: {field: UPDATED_AT, direction: DESC}, filterBy: {createdBy: "${config.renovateUsername}"}) { - pageInfo { - startCursor - hasNextPage + if (!config.openPrList) { + config.openPrList = {}; + let query; + try { + const url = 'graphql'; + // 
https://developer.github.com/v4/previews/#mergeinfopreview---more-detailed-information-about-a-pull-requests-merge-state + const headers = { + accept: 'application/vnd.github.merge-info-preview+json', + }; + // prettier-ignore + query = ` + query { + repository(owner: "${config.repositoryOwner}", name: "${config.repositoryName}") { + pullRequests(states: [OPEN], first: 100, orderBy: {field: UPDATED_AT, direction: DESC}) { + nodes { + number + headRefName + baseRefName + title + mergeable + mergeStateStatus + labels(last: 100) { + nodes { + name + } + } + commits(first: 2) { + nodes { + commit { + author { + email + } + committer { + email + } + parents(last: 1) { + edges { + node { + abbreviatedOid + oid + } + } + } + } + } + } + body + reviews(first: 1, states:[CHANGES_REQUESTED]){ + nodes{ + state + } + } + } + } } - nodes { - number - state - title - body + } + `; + const options = { + headers, + body: JSON.stringify({ query }), + json: false, + }; + const res = JSON.parse((await api.post(url, options)).body); + const prNumbers: number[] = []; + // istanbul ignore if + if (!res.data) { + logger.info({ query, res }, 'No graphql res.data'); + return {}; + } + for (const pr of res.data.repository.pullRequests.nodes) { + // https://developer.github.com/v4/object/pullrequest/ + pr.displayNumber = `Pull Request #${pr.number}`; + pr.state = 'open'; + pr.branchName = pr.headRefName; + const branchName = pr.branchName; + const prNo = pr.number; + delete pr.headRefName; + pr.targetBranch = pr.baseRefName; + delete pr.baseRefName; + // https://developer.github.com/v4/enum/mergeablestate + const canMergeStates = ['BEHIND', 'CLEAN']; + const hasNegativeReview = + pr.reviews && pr.reviews.nodes && pr.reviews.nodes.length > 0; + pr.canMerge = + canMergeStates.includes(pr.mergeStateStatus) && !hasNegativeReview; + // https://developer.github.com/v4/enum/mergestatestatus + if (pr.mergeStateStatus === 'DIRTY') { + pr.isConflicted = true; + } else { + pr.isConflicted = false; + } + if (pr.commits.nodes.length === 1) { + if (global.gitAuthor) { + // Check against gitAuthor + const commitAuthorEmail = pr.commits.nodes[0].commit.author.email; + if (commitAuthorEmail === global.gitAuthor.email) { + pr.isModified = false; + } else { + logger.trace( + { + branchName, + prNo, + commitAuthorEmail, + gitAuthorEmail: global.gitAuthor.email, + }, + 'PR isModified=true: last committer has different email to the bot' + ); + pr.isModified = true; + } + } else { + // assume the author is us + // istanbul ignore next + pr.isModified = false; + } + } else { + // assume we can't rebase if more than 1 + logger.trace( + { + branchName, + prNo, + }, + 'PR isModified=true: PR has more than one commit' + ); + pr.isModified = true; + } + pr.isStale = false; + if (pr.mergeStateStatus === 'BEHIND') { + pr.isStale = true; + } else { + const baseCommitSHA = await getBaseCommitSHA(); + if ( + pr.commits.nodes[0].commit.parents.edges.length && + pr.commits.nodes[0].commit.parents.edges[0].node.oid !== + baseCommitSHA + ) { + pr.isStale = true; + } + } + if (pr.labels) { + pr.labels = pr.labels.nodes.map( + (label: { name: string }) => label.name + ); } + delete pr.mergeable; + delete pr.mergeStateStatus; + delete pr.commits; + config.openPrList[pr.number] = pr; + prNumbers.push(pr.number); } + prNumbers.sort(); + logger.trace({ prNumbers }, 'Retrieved open PR list with graphql'); + } catch (err) /* istanbul ignore next */ { + logger.warn({ query, err }, 'getOpenPrs error'); } } - `; - - const options = { - headers, - body: 
JSON.stringify({ query }), - json: false, - }; - - try { - const res = JSON.parse((await api.post(url, options)).body); - - if (!res.data) { - logger.info({ query, res }, 'No graphql res.data'); - return [false, [], null]; - } - - const cursor = res.data.repository.issues.pageInfo.hasNextPage - ? res.data.repository.issues.pageInfo.startCursor - : null; - - return [true, res.data.repository.issues.nodes, cursor]; - } catch (err) { - logger.warn({ query, err }, 'getGraphqlIssues error'); - throw new Error('platform-failure'); - } + return config.openPrList; } -// istanbul ignore next -async function getRestIssues() { - logger.debug('Retrieving issueList'); - const res = await api.get< - { - pull_request: boolean; - number: number; - state: string; - title: string; - }[] - >( - `repos/${config.repository}/issues?creator=${config.renovateUsername}&state=all&per_page=100&sort=created&direction=asc`, - { paginate: 'all', useCache: false } - ); - // istanbul ignore if - if (!is.array(res.body)) { - logger.warn({ responseBody: res.body }, 'Could not retrieve issue list'); - return []; +// Gets details for a PR +export async function getPr(prNo: number) { + if (!prNo) { + return null; } - return res.body - .filter(issue => !issue.pull_request) - .map(i => ({ - number: i.number, - state: i.state, - title: i.title, - })); -} - -export async function getIssueList() { - if (!config.issueList) { - logger.debug('Retrieving issueList'); - const filterBySupportMinimumGheVersion = '2.17.0'; - // istanbul ignore next - if ( - config.enterpriseVersion && - semver.lt(config.enterpriseVersion, filterBySupportMinimumGheVersion) - ) { - config.issueList = await getRestIssues(); - return config.issueList; - } - let [success, issues, cursor] = await getGraphqlIssues(); - config.issueList = []; - while (success) { - for (const issue of issues) { - issue.state = issue.state.toLowerCase(); - config.issueList.push(issue); - } - - if (!cursor) { - break; - } - // istanbul ignore next - [success, issues, cursor] = await getGraphqlIssues(cursor); - } - logger.debug('Retrieved ' + config.issueList.length + ' issues'); + const openPr = (await getOpenPrs())[prNo]; + if (openPr) { + logger.debug('Returning from graphql open PR list'); + return openPr; } - return config.issueList; -} - -export async function findIssue(title: string) { - logger.debug(`findIssue(${title})`); - const [issue] = (await getIssueList()).filter( - i => i.state === 'open' && i.title === title + const closedPr = (await getClosedPrs())[prNo]; + if (closedPr) { + logger.debug('Returning from graphql closed PR list'); + return closedPr; + } + logger.info( + { prNo }, + 'PR not found in open or closed PRs list - trying to fetch it directly' ); - if (!issue) { + const pr = (await api.get( + `repos/${config.parentRepo || config.repository}/pulls/${prNo}` + )).body; + if (!pr) { return null; } - logger.debug('Found issue ' + issue.number); - const issueBody = (await api.get( - `repos/${config.parentRepo || config.repository}/issues/${issue.number}` - )).body.body; - return { - number: issue.number, - body: issueBody, - }; -} - -export async function ensureIssue( - title: string, - rawbody: string, - once = false, - reopen = true -) { - logger.debug(`ensureIssue(${title})`); - const body = sanitize(rawbody); - try { - const issueList = await getIssueList(); - const issues = issueList.filter(i => i.title === title); - if (issues.length) { - let issue = issues.find(i => i.state === 'open'); - if (!issue) { - if (once) { - logger.debug('Issue already closed - 
skipping recreation'); - return null; - } - if (reopen) { - logger.info('Reopening previously closed issue'); - } - issue = issues[issues.length - 1]; - } - for (const i of issues) { - if (i.state === 'open' && i.number !== issue.number) { - logger.warn('Closing duplicate issue ' + i.number); - await closeIssue(i.number); + // Harmonise PR values + pr.displayNumber = `Pull Request #${pr.number}`; + if (pr.state === 'open') { + pr.isModified = true; + pr.branchName = pr.head ? pr.head.ref : undefined; + pr.sha = pr.head ? pr.head.sha : undefined; + if (pr.mergeable === true) { + pr.canMerge = true; + } + if (pr.mergeable_state === 'dirty') { + logger.debug({ prNo }, 'PR state is dirty so unmergeable'); + pr.isConflicted = true; + } + if (pr.commits === 1) { + if (global.gitAuthor) { + // Check against gitAuthor + const commitAuthorEmail = (await api.get( + `repos/${config.parentRepo || + config.repository}/pulls/${prNo}/commits` + )).body[0].commit.author.email; + if (commitAuthorEmail === global.gitAuthor.email) { + logger.debug( + { prNo }, + '1 commit matches configured gitAuthor so can rebase' + ); + pr.isModified = false; + } else { + logger.trace( + { + prNo, + commitAuthorEmail, + gitAuthorEmail: global.gitAuthor.email, + }, + 'PR isModified=true: 1 commit and not by configured gitAuthor so cannot rebase' + ); + pr.isModified = true; } + } else { + logger.debug( + { prNo }, + '1 commit and no configured gitAuthor so can rebase' + ); + pr.isModified = false; } - const issueBody = (await api.get( - `repos/${config.parentRepo || config.repository}/issues/${issue.number}` - )).body.body; - if (issueBody === body && issue.state === 'open') { - logger.info('Issue is open and up to date - nothing to do'); - return null; - } - if (reopen) { - logger.info('Patching issue'); - await api.patch( - `repos/${config.parentRepo || config.repository}/issues/${ - issue.number - }`, - { - body: { body, state: 'open' }, + } else { + // Check if only one author of all commits + logger.debug({ prNo }, 'Checking all commits'); + const prCommits = (await api.get( + `repos/${config.parentRepo || config.repository}/pulls/${prNo}/commits` + )).body; + // Filter out "Update branch" presses + const remainingCommits = prCommits.filter( + (commit: { + committer: { login: string }; + commit: { message: string }; + }) => { + const isWebflow = + commit.committer && commit.committer.login === 'web-flow'; + if (!isWebflow) { + // Not a web UI commit, so keep it + return true; } - ); - logger.info('Issue updated'); - return 'updated'; + const isUpdateBranch = + commit.commit && + commit.commit.message && + commit.commit.message.startsWith("Merge branch 'master' into"); + if (isUpdateBranch) { + // They just clicked the button + return false; + } + // They must have done some other edit through the web UI + return true; + } + ); + if (remainingCommits.length <= 1) { + pr.isModified = false; } } - await api.post(`repos/${config.parentRepo || config.repository}/issues`, { - body: { - title, - body, - }, - }); - logger.info('Issue created'); - // reset issueList so that it will be fetched again as-needed - delete config.issueList; - return 'created'; - } catch (err) /* istanbul ignore next */ { - if ( - err.body && - err.body.message && - err.body.message.startsWith('Issues are disabled for this repo') - ) { - logger.info( - `Issues are disabled, so could not create issue: ${err.message}` - ); - } else { - logger.warn({ err }, 'Could not ensure issue'); + const baseCommitSHA = await getBaseCommitSHA(); + if (!pr.base || 
pr.base.sha !== baseCommitSHA) { + pr.isStale = true; } } - return null; -} - -async function closeIssue(issueNumber: number) { - logger.debug(`closeIssue(${issueNumber})`); - await api.patch( - `repos/${config.parentRepo || config.repository}/issues/${issueNumber}`, - { - body: { state: 'closed' }, - } - ); + return pr; } -export async function ensureIssueClosing(title: string) { - logger.debug(`ensureIssueClosing(${title})`); - const issueList = await getIssueList(); - for (const issue of issueList) { - if (issue.state === 'open' && issue.title === title) { - await closeIssue(issue.number); - logger.info({ number: issue.number }, 'Issue closed'); - } +function matchesState(state: string, desiredState: string) { + if (desiredState === 'all') { + return true; } + if (desiredState[0] === '!') { + return state !== desiredState.substring(1); + } + return state === desiredState; } -export async function addAssignees(issueNo: number, assignees: string[]) { - logger.debug(`Adding assignees ${assignees} to #${issueNo}`); - const repository = config.parentRepo || config.repository; - await api.post(`repos/${repository}/issues/${issueNo}/assignees`, { - body: { - assignees, - }, - }); -} - -export async function addReviewers(prNo: number, reviewers: string[]) { - logger.debug(`Adding reviewers ${reviewers} to #${prNo}`); - - const userReviewers = reviewers.filter(e => !e.startsWith('team:')); - const teamReviewers = reviewers - .filter(e => e.startsWith('team:')) - .map(e => e.replace(/^team:/, '')); - - await api.post( - `repos/${config.parentRepo || - config.repository}/pulls/${prNo}/requested_reviewers`, - { - body: { - reviewers: userReviewers, - team_reviewers: teamReviewers, - }, - } - ); -} - -async function addLabels(issueNo: number, labels: string[] | null) { - logger.debug(`Adding labels ${labels} to #${issueNo}`); - const repository = config.parentRepo || config.repository; - if (is.array(labels) && labels.length) { - await api.post(`repos/${repository}/issues/${issueNo}/labels`, { - body: labels, - }); - } -} - -export async function deleteLabel(issueNo: number, label: string) { - logger.debug(`Deleting label ${label} from #${issueNo}`); - const repository = config.parentRepo || config.repository; - try { - await api.delete(`repos/${repository}/issues/${issueNo}/labels/${label}`); - } catch (err) /* istanbul ignore next */ { - logger.warn({ err, issueNo, label }, 'Failed to delete label'); - } -} - -async function getComments(issueNo: number) { - const pr = (await getClosedPrs())[issueNo]; - if (pr) { - logger.debug('Returning closed PR list comments'); - return pr.comments; - } - // GET /repos/:owner/:repo/issues/:number/comments - logger.debug(`Getting comments for #${issueNo}`); - const url = `repos/${config.parentRepo || - config.repository}/issues/${issueNo}/comments?per_page=100`; - try { - const comments = (await api.get<Comment[]>(url, { - paginate: true, - })).body; - logger.debug(`Found ${comments.length} comments`); - return comments; - } catch (err) /* istanbul ignore next */ { - if (err.statusCode === 404) { - logger.debug('404 respose when retrieving comments'); - throw new Error('platform-failure'); - } - throw err; - } -} - -async function addComment(issueNo: number, body: string) { - // POST /repos/:owner/:repo/issues/:number/comments - await api.post( - `repos/${config.parentRepo || - config.repository}/issues/${issueNo}/comments`, - { - body: { body }, - } - ); -} - -async function editComment(commentId: number, body: string) { - // PATCH 
/repos/:owner/:repo/issues/comments/:id - await api.patch( - `repos/${config.parentRepo || - config.repository}/issues/comments/${commentId}`, - { - body: { body }, - } - ); -} - -async function deleteComment(commentId: number) { - // DELETE /repos/:owner/:repo/issues/comments/:id - await api.delete( - `repos/${config.parentRepo || - config.repository}/issues/comments/${commentId}` - ); -} - -export async function ensureComment( - issueNo: number, - topic: string | null, - rawContent: string -) { - const content = sanitize(rawContent); - try { - const comments = await getComments(issueNo); - let body: string; - let commentId: number | null = null; - let commentNeedsUpdating = false; - if (topic) { - logger.debug(`Ensuring comment "${topic}" in #${issueNo}`); - body = `### ${topic}\n\n${content}`; - comments.forEach(comment => { - if (comment.body.startsWith(`### ${topic}\n\n`)) { - commentId = comment.id; - commentNeedsUpdating = comment.body !== body; - } - }); - } else { - logger.debug(`Ensuring content-only comment in #${issueNo}`); - body = `${content}`; - comments.forEach(comment => { - if (comment.body === body) { - commentId = comment.id; - commentNeedsUpdating = false; - } - }); - } - if (!commentId) { - await addComment(issueNo, body); - logger.info( - { repository: config.repository, issueNo, topic }, - 'Comment added' - ); - } else if (commentNeedsUpdating) { - await editComment(commentId, body); - logger.info( - { repository: config.repository, issueNo }, - 'Comment updated' - ); - } else { - logger.debug('Comment is already update-to-date'); - } - return true; - } catch (err) /* istanbul ignore next */ { - if (err.message === 'platform-failure') { - throw err; - } - if ( - err.message === 'Unable to create comment because issue is locked. (403)' - ) { - logger.info('Issue is locked - cannot add comment'); - } else { - logger.warn({ err }, 'Error ensuring comment'); - } - return false; - } -} - -export async function ensureCommentRemoval(issueNo: number, topic: string) { - logger.debug(`Ensuring comment "${topic}" in #${issueNo} is removed`); - const comments = await getComments(issueNo); - let commentId; - comments.forEach(comment => { - if (comment.body.startsWith(`### ${topic}\n\n`)) { - commentId = comment.id; - } - }); - try { - if (commentId) { - await deleteComment(commentId); - } - } catch (err) /* istanbul ignore next */ { - logger.warn({ err }, 'Error deleting comment'); - } -} - -// Pull Request - export async function getPrList() { logger.trace('getPrList()'); if (!config.prList) { @@ -1157,16 +944,6 @@ export async function getPrList() { return config.prList!; } -function matchesState(state: string, desiredState: string) { - if (desiredState === 'all') { - return true; - } - if (desiredState[0] === '!') { - return state !== desiredState.substring(1); - } - return state === desiredState; -} - export async function findPr( branchName: string, prTitle?: string | null, @@ -1186,399 +963,622 @@ export async function findPr( return pr; } -// Creates PR and returns PR number -export async function createPr( +// Returns the Pull Request for a branch. Null if not exists. +export async function getBranchPr(branchName: string) { + logger.debug(`getBranchPr(${branchName})`); + const existingPr = await findPr(branchName, null, 'open'); + return existingPr ? getPr(existingPr.number) : null; +} + +// Returns the combined status for a branch. 
+export async function getBranchStatus( branchName: string, - title: string, - rawBody: string, - labels: string[] | null, - useDefaultBranch: boolean, - platformOptions: { statusCheckVerify?: boolean } = {} + requiredStatusChecks: any ) { - const body = sanitize(rawBody); - const base = useDefaultBranch ? config.defaultBranch : config.baseBranch; - // Include the repository owner to handle forkMode and regular mode - const head = `${config.repository!.split('/')[0]}:${branchName}`; - const options: any = { - body: { - title, - head, - base, - body, - }, - }; - // istanbul ignore if - if (config.forkToken) { - options.token = config.forkToken; - options.body.maintainer_can_modify = true; - } - logger.debug({ title, head, base }, 'Creating PR'); - const pr = (await api.post<Pr>( - `repos/${config.parentRepo || config.repository}/pulls`, - options - )).body; - logger.debug({ branch: branchName, pr: pr.number }, 'PR created'); - // istanbul ignore if - if (config.prList) { - config.prList.push(pr); + logger.debug(`getBranchStatus(${branchName})`); + if (!requiredStatusChecks) { + // null means disable status checks, so it always succeeds + logger.debug('Status checks disabled = returning "success"'); + return 'success'; } - pr.displayNumber = `Pull Request #${pr.number}`; - pr.branchName = branchName; - await addLabels(pr.number, labels); - if (platformOptions.statusCheckVerify) { - logger.debug('Setting statusCheckVerify'); - await setBranchStatus( - branchName, - `${appSlug}/verify`, - `${appName} verified pull request`, - 'success', - urls.homepage - ); + if (requiredStatusChecks.length) { + // This is Unsupported + logger.warn({ requiredStatusChecks }, `Unsupported requiredStatusChecks`); + return 'failed'; } - pr.isModified = false; - return pr; -} - -async function getOpenPrs() { - // istanbul ignore if - if (config.isGhe) { - logger.debug( - 'Skipping unsupported graphql PullRequests.mergeStateStatus query on GHE' - ); - return {}; + const commitStatusUrl = `repos/${config.repository}/commits/${escapeHash( + branchName + )}/status`; + let commitStatus; + try { + commitStatus = (await api.get(commitStatusUrl)).body; + } catch (err) /* istanbul ignore next */ { + if (err.statusCode === 404) { + logger.info( + 'Received 404 when checking branch status, assuming that branch has been deleted' + ); + throw new Error('repository-changed'); + } + logger.info('Unknown error when checking branch status'); + throw err; } - if (!config.openPrList) { - config.openPrList = {}; - let query; + logger.debug( + { state: commitStatus.state, statuses: commitStatus.statuses }, + 'branch status check result' + ); + let checkRuns: { name: string; status: string; conclusion: string }[] = []; + if (!config.isGhe) { try { - const url = 'graphql'; - // https://developer.github.com/v4/previews/#mergeinfopreview---more-detailed-information-about-a-pull-requests-merge-state - const headers = { - accept: 'application/vnd.github.merge-info-preview+json', + const checkRunsUrl = `repos/${config.repository}/commits/${escapeHash( + branchName + )}/check-runs`; + const opts = { + headers: { + Accept: 'application/vnd.github.antiope-preview+json', + }, }; - // prettier-ignore - query = ` - query { - repository(owner: "${config.repositoryOwner}", name: "${config.repositoryName}") { - pullRequests(states: [OPEN], first: 100, orderBy: {field: UPDATED_AT, direction: DESC}) { - nodes { - number - headRefName - baseRefName - title - mergeable - mergeStateStatus - labels(last: 100) { - nodes { - name - } - } - commits(first: 2) { 
- nodes { - commit { - author { - email - } - committer { - email - } - parents(last: 1) { - edges { - node { - abbreviatedOid - oid - } - } - } - } - } - } - body - reviews(first: 1, states:[CHANGES_REQUESTED]){ - nodes{ - state - } - } - } - } - } + const checkRunsRaw = (await api.get(checkRunsUrl, opts)).body; + if (checkRunsRaw.check_runs && checkRunsRaw.check_runs.length) { + checkRuns = checkRunsRaw.check_runs.map( + (run: { name: string; status: string; conclusion: string }) => ({ + name: run.name, + status: run.status, + conclusion: run.conclusion, + }) + ); + logger.debug({ checkRuns }, 'check runs result'); + } else { + // istanbul ignore next + logger.debug({ result: checkRunsRaw }, 'No check runs found'); } - `; - const options = { - headers, - body: JSON.stringify({ query }), - json: false, - }; - const res = JSON.parse((await api.post(url, options)).body); - const prNumbers: number[] = []; - // istanbul ignore if - if (!res.data) { - logger.info({ query, res }, 'No graphql res.data'); - return {}; + } catch (err) /* istanbul ignore next */ { + if (err.message === 'platform-failure') { + throw err; } - for (const pr of res.data.repository.pullRequests.nodes) { - // https://developer.github.com/v4/object/pullrequest/ - pr.displayNumber = `Pull Request #${pr.number}`; - pr.state = 'open'; - pr.branchName = pr.headRefName; - const branchName = pr.branchName; - const prNo = pr.number; - delete pr.headRefName; - pr.targetBranch = pr.baseRefName; - delete pr.baseRefName; - // https://developer.github.com/v4/enum/mergeablestate - const canMergeStates = ['BEHIND', 'CLEAN']; - const hasNegativeReview = - pr.reviews && pr.reviews.nodes && pr.reviews.nodes.length > 0; - pr.canMerge = - canMergeStates.includes(pr.mergeStateStatus) && !hasNegativeReview; - // https://developer.github.com/v4/enum/mergestatestatus - if (pr.mergeStateStatus === 'DIRTY') { - pr.isConflicted = true; - } else { - pr.isConflicted = false; - } - if (pr.commits.nodes.length === 1) { - if (global.gitAuthor) { - // Check against gitAuthor - const commitAuthorEmail = pr.commits.nodes[0].commit.author.email; - if (commitAuthorEmail === global.gitAuthor.email) { - pr.isModified = false; - } else { - logger.trace( - { - branchName, - prNo, - commitAuthorEmail, - gitAuthorEmail: global.gitAuthor.email, - }, - 'PR isModified=true: last committer has different email to the bot' - ); - pr.isModified = true; - } - } else { - // assume the author is us - // istanbul ignore next - pr.isModified = false; - } - } else { - // assume we can't rebase if more than 1 - logger.trace( - { - branchName, - prNo, - }, - 'PR isModified=true: PR has more than one commit' - ); - pr.isModified = true; - } - pr.isStale = false; - if (pr.mergeStateStatus === 'BEHIND') { - pr.isStale = true; - } else { - const baseCommitSHA = await getBaseCommitSHA(); - if ( - pr.commits.nodes[0].commit.parents.edges.length && - pr.commits.nodes[0].commit.parents.edges[0].node.oid !== - baseCommitSHA - ) { - pr.isStale = true; - } + if ( + err.statusCode === 403 || + err.message === 'integration-unauthorized' + ) { + logger.info('No permission to view check runs'); + } else { + logger.warn({ err }, 'Error retrieving check runs'); + } + } + } + if (checkRuns.length === 0) { + return commitStatus.state; + } + if ( + commitStatus.state === 'failed' || + checkRuns.some(run => run.conclusion === 'failed') + ) { + return 'failed'; + } + if ( + (commitStatus.state === 'success' || commitStatus.statuses.length === 0) && + checkRuns.every(run => ['neutral', 
'success'].includes(run.conclusion)) + ) { + return 'success'; + } + return 'pending'; +} + +export async function getBranchStatusCheck( + branchName: string, + context: string +) { + const branchCommit = await config.storage.getBranchCommit(branchName); + const url = `repos/${config.repository}/commits/${branchCommit}/statuses`; + try { + const res = await api.get(url); + for (const check of res.body) { + if (check.context === context) { + return check.state; + } + } + return null; + } catch (err) /* istanbul ignore next */ { + if (err.statusCode === 404) { + logger.info('Commit not found when checking statuses'); + throw new Error('repository-changed'); + } + throw err; + } +} + +export async function setBranchStatus( + branchName: string, + context: string, + description: string, + state: string, + targetUrl?: string +) { + // istanbul ignore if + if (config.parentRepo) { + logger.info('Cannot set branch status when in forking mode'); + return; + } + const existingStatus = await getBranchStatusCheck(branchName, context); + if (existingStatus === state) { + return; + } + logger.info({ branch: branchName, context, state }, 'Setting branch status'); + const branchCommit = await config.storage.getBranchCommit(branchName); + const url = `repos/${config.repository}/statuses/${branchCommit}`; + const options: any = { + state, + description, + context, + }; + if (targetUrl) { + options.target_url = targetUrl; + } + await api.post(url, { body: options }); +} + +// Issue + +/* istanbul ignore next */ +async function getGraphqlIssues(afterCursor: string | null = null) { + const url = 'graphql'; + const headers = { + accept: 'application/vnd.github.merge-info-preview+json', + }; + // prettier-ignore + const query = ` + query { + repository(owner: "${config.repositoryOwner}", name: "${config.repositoryName}") { + issues(first: 100, after:${afterCursor}, orderBy: {field: UPDATED_AT, direction: DESC}, filterBy: {createdBy: "${config.renovateUsername}"}) { + pageInfo { + startCursor + hasNextPage } - if (pr.labels) { - pr.labels = pr.labels.nodes.map( - (label: { name: string }) => label.name - ); + nodes { + number + state + title + body } - delete pr.mergeable; - delete pr.mergeStateStatus; - delete pr.commits; - config.openPrList[pr.number] = pr; - prNumbers.push(pr.number); } - prNumbers.sort(); - logger.trace({ prNumbers }, 'Retrieved open PR list with graphql'); - } catch (err) /* istanbul ignore next */ { - logger.warn({ query, err }, 'getOpenPrs error'); } } - return config.openPrList; + `; + + const options = { + headers, + body: JSON.stringify({ query }), + json: false, + }; + + try { + const res = JSON.parse((await api.post(url, options)).body); + + if (!res.data) { + logger.info({ query, res }, 'No graphql res.data'); + return [false, [], null]; + } + + const cursor = res.data.repository.issues.pageInfo.hasNextPage + ? 
res.data.repository.issues.pageInfo.startCursor + : null; + + return [true, res.data.repository.issues.nodes, cursor]; + } catch (err) { + logger.warn({ query, err }, 'getGraphqlIssues error'); + throw new Error('platform-failure'); + } +} + +// istanbul ignore next +async function getRestIssues() { + logger.debug('Retrieving issueList'); + const res = await api.get< + { + pull_request: boolean; + number: number; + state: string; + title: string; + }[] + >( + `repos/${config.repository}/issues?creator=${config.renovateUsername}&state=all&per_page=100&sort=created&direction=asc`, + { paginate: 'all', useCache: false } + ); + // istanbul ignore if + if (!is.array(res.body)) { + logger.warn({ responseBody: res.body }, 'Could not retrieve issue list'); + return []; + } + return res.body + .filter(issue => !issue.pull_request) + .map(i => ({ + number: i.number, + state: i.state, + title: i.title, + })); +} + +export async function getIssueList() { + if (!config.issueList) { + logger.debug('Retrieving issueList'); + const filterBySupportMinimumGheVersion = '2.17.0'; + // istanbul ignore next + if ( + config.enterpriseVersion && + semver.lt(config.enterpriseVersion, filterBySupportMinimumGheVersion) + ) { + config.issueList = await getRestIssues(); + return config.issueList; + } + let [success, issues, cursor] = await getGraphqlIssues(); + config.issueList = []; + while (success) { + for (const issue of issues) { + issue.state = issue.state.toLowerCase(); + config.issueList.push(issue); + } + + if (!cursor) { + break; + } + // istanbul ignore next + [success, issues, cursor] = await getGraphqlIssues(cursor); + } + logger.debug('Retrieved ' + config.issueList.length + ' issues'); + } + return config.issueList; } -async function getClosedPrs() { - if (!config.closedPrList) { - config.closedPrList = {}; - let query; - try { - const url = 'graphql'; - // prettier-ignore - query = ` - query { - repository(owner: "${config.repositoryOwner}", name: "${config.repositoryName}") { - pullRequests(states: [CLOSED, MERGED], first: 100, orderBy: {field: UPDATED_AT, direction: DESC}) { - nodes { - number - state - headRefName - title - comments(last: 100) { - nodes { - databaseId - body - } - } - } - } +export async function findIssue(title: string) { + logger.debug(`findIssue(${title})`); + const [issue] = (await getIssueList()).filter( + i => i.state === 'open' && i.title === title + ); + if (!issue) { + return null; + } + logger.debug('Found issue ' + issue.number); + const issueBody = (await api.get( + `repos/${config.parentRepo || config.repository}/issues/${issue.number}` + )).body.body; + return { + number: issue.number, + body: issueBody, + }; +} + +async function closeIssue(issueNumber: number) { + logger.debug(`closeIssue(${issueNumber})`); + await api.patch( + `repos/${config.parentRepo || config.repository}/issues/${issueNumber}`, + { + body: { state: 'closed' }, + } + ); +} + +export async function ensureIssue( + title: string, + rawbody: string, + once = false, + reopen = true +) { + logger.debug(`ensureIssue(${title})`); + const body = sanitize(rawbody); + try { + const issueList = await getIssueList(); + const issues = issueList.filter(i => i.title === title); + if (issues.length) { + let issue = issues.find(i => i.state === 'open'); + if (!issue) { + if (once) { + logger.debug('Issue already closed - skipping recreation'); + return null; } + if (reopen) { + logger.info('Reopening previously closed issue'); + } + issue = issues[issues.length - 1]; } - `; - const options = { - body: 
JSON.stringify({ query }), - json: false, - }; - const res = JSON.parse((await api.post(url, options)).body); - const prNumbers: number[] = []; - // istanbul ignore if - if (!res.data) { - logger.info( - { query, res }, - 'No graphql res.data, returning empty list' - ); - return {}; + for (const i of issues) { + if (i.state === 'open' && i.number !== issue.number) { + logger.warn('Closing duplicate issue ' + i.number); + await closeIssue(i.number); + } } - for (const pr of res.data.repository.pullRequests.nodes) { - // https://developer.github.com/v4/object/pullrequest/ - pr.displayNumber = `Pull Request #${pr.number}`; - pr.state = pr.state.toLowerCase(); - pr.branchName = pr.headRefName; - delete pr.headRefName; - pr.comments = pr.comments.nodes.map( - (comment: { databaseId: number; body: string }) => ({ - id: comment.databaseId, - body: comment.body, - }) + const issueBody = (await api.get( + `repos/${config.parentRepo || config.repository}/issues/${issue.number}` + )).body.body; + if (issueBody === body && issue.state === 'open') { + logger.info('Issue is open and up to date - nothing to do'); + return null; + } + if (reopen) { + logger.info('Patching issue'); + await api.patch( + `repos/${config.parentRepo || config.repository}/issues/${ + issue.number + }`, + { + body: { body, state: 'open' }, + } ); - pr.body = 'dummy body'; // just in case - config.closedPrList[pr.number] = pr; - prNumbers.push(pr.number); + logger.info('Issue updated'); + return 'updated'; } - prNumbers.sort(); - logger.debug({ prNumbers }, 'Retrieved closed PR list with graphql'); - } catch (err) /* istanbul ignore next */ { - logger.warn({ query, err }, 'getClosedPrs error'); } - } - return config.closedPrList; + await api.post(`repos/${config.parentRepo || config.repository}/issues`, { + body: { + title, + body, + }, + }); + logger.info('Issue created'); + // reset issueList so that it will be fetched again as-needed + delete config.issueList; + return 'created'; + } catch (err) /* istanbul ignore next */ { + if ( + err.body && + err.body.message && + err.body.message.startsWith('Issues are disabled for this repo') + ) { + logger.info( + `Issues are disabled, so could not create issue: ${err.message}` + ); + } else { + logger.warn({ err }, 'Could not ensure issue'); + } + } + return null; +} + +export async function ensureIssueClosing(title: string) { + logger.debug(`ensureIssueClosing(${title})`); + const issueList = await getIssueList(); + for (const issue of issueList) { + if (issue.state === 'open' && issue.title === title) { + await closeIssue(issue.number); + logger.info({ number: issue.number }, 'Issue closed'); + } + } +} + +export async function addAssignees(issueNo: number, assignees: string[]) { + logger.debug(`Adding assignees ${assignees} to #${issueNo}`); + const repository = config.parentRepo || config.repository; + await api.post(`repos/${repository}/issues/${issueNo}/assignees`, { + body: { + assignees, + }, + }); +} + +export async function addReviewers(prNo: number, reviewers: string[]) { + logger.debug(`Adding reviewers ${reviewers} to #${prNo}`); + + const userReviewers = reviewers.filter(e => !e.startsWith('team:')); + const teamReviewers = reviewers + .filter(e => e.startsWith('team:')) + .map(e => e.replace(/^team:/, '')); + + await api.post( + `repos/${config.parentRepo || + config.repository}/pulls/${prNo}/requested_reviewers`, + { + body: { + reviewers: userReviewers, + team_reviewers: teamReviewers, + }, + } + ); +} + +async function addLabels(issueNo: number, labels: string[] | 
null) { + logger.debug(`Adding labels ${labels} to #${issueNo}`); + const repository = config.parentRepo || config.repository; + if (is.array(labels) && labels.length) { + await api.post(`repos/${repository}/issues/${issueNo}/labels`, { + body: labels, + }); + } +} + +export async function deleteLabel(issueNo: number, label: string) { + logger.debug(`Deleting label ${label} from #${issueNo}`); + const repository = config.parentRepo || config.repository; + try { + await api.delete(`repos/${repository}/issues/${issueNo}/labels/${label}`); + } catch (err) /* istanbul ignore next */ { + logger.warn({ err, issueNo, label }, 'Failed to delete label'); + } +} + +async function addComment(issueNo: number, body: string) { + // POST /repos/:owner/:repo/issues/:number/comments + await api.post( + `repos/${config.parentRepo || + config.repository}/issues/${issueNo}/comments`, + { + body: { body }, + } + ); +} + +async function editComment(commentId: number, body: string) { + // PATCH /repos/:owner/:repo/issues/comments/:id + await api.patch( + `repos/${config.parentRepo || + config.repository}/issues/comments/${commentId}`, + { + body: { body }, + } + ); } -// Gets details for a PR -export async function getPr(prNo: number) { - if (!prNo) { - return null; - } - const openPr = (await getOpenPrs())[prNo]; - if (openPr) { - logger.debug('Returning from graphql open PR list'); - return openPr; - } - const closedPr = (await getClosedPrs())[prNo]; - if (closedPr) { - logger.debug('Returning from graphql closed PR list'); - return closedPr; - } - logger.info( - { prNo }, - 'PR not found in open or closed PRs list - trying to fetch it directly' +async function deleteComment(commentId: number) { + // DELETE /repos/:owner/:repo/issues/comments/:id + await api.delete( + `repos/${config.parentRepo || + config.repository}/issues/comments/${commentId}` ); - const pr = (await api.get( - `repos/${config.parentRepo || config.repository}/pulls/${prNo}` - )).body; - if (!pr) { - return null; +} + +async function getComments(issueNo: number) { + const pr = (await getClosedPrs())[issueNo]; + if (pr) { + logger.debug('Returning closed PR list comments'); + return pr.comments; } - // Harmonise PR values - pr.displayNumber = `Pull Request #${pr.number}`; - if (pr.state === 'open') { - pr.isModified = true; - pr.branchName = pr.head ? pr.head.ref : undefined; - pr.sha = pr.head ? 
pr.head.sha : undefined; - if (pr.mergeable === true) { - pr.canMerge = true; - } - if (pr.mergeable_state === 'dirty') { - logger.debug({ prNo }, 'PR state is dirty so unmergeable'); - pr.isConflicted = true; + // GET /repos/:owner/:repo/issues/:number/comments + logger.debug(`Getting comments for #${issueNo}`); + const url = `repos/${config.parentRepo || + config.repository}/issues/${issueNo}/comments?per_page=100`; + try { + const comments = (await api.get<Comment[]>(url, { + paginate: true, + })).body; + logger.debug(`Found ${comments.length} comments`); + return comments; + } catch (err) /* istanbul ignore next */ { + if (err.statusCode === 404) { + logger.debug('404 respose when retrieving comments'); + throw new Error('platform-failure'); } - if (pr.commits === 1) { - if (global.gitAuthor) { - // Check against gitAuthor - const commitAuthorEmail = (await api.get( - `repos/${config.parentRepo || - config.repository}/pulls/${prNo}/commits` - )).body[0].commit.author.email; - if (commitAuthorEmail === global.gitAuthor.email) { - logger.debug( - { prNo }, - '1 commit matches configured gitAuthor so can rebase' - ); - pr.isModified = false; - } else { - logger.trace( - { - prNo, - commitAuthorEmail, - gitAuthorEmail: global.gitAuthor.email, - }, - 'PR isModified=true: 1 commit and not by configured gitAuthor so cannot rebase' - ); - pr.isModified = true; + throw err; + } +} + +export async function ensureComment( + issueNo: number, + topic: string | null, + rawContent: string +) { + const content = sanitize(rawContent); + try { + const comments = await getComments(issueNo); + let body: string; + let commentId: number | null = null; + let commentNeedsUpdating = false; + if (topic) { + logger.debug(`Ensuring comment "${topic}" in #${issueNo}`); + body = `### ${topic}\n\n${content}`; + comments.forEach(comment => { + if (comment.body.startsWith(`### ${topic}\n\n`)) { + commentId = comment.id; + commentNeedsUpdating = comment.body !== body; } - } else { - logger.debug( - { prNo }, - '1 commit and no configured gitAuthor so can rebase' - ); - pr.isModified = false; - } + }); } else { - // Check if only one author of all commits - logger.debug({ prNo }, 'Checking all commits'); - const prCommits = (await api.get( - `repos/${config.parentRepo || config.repository}/pulls/${prNo}/commits` - )).body; - // Filter out "Update branch" presses - const remainingCommits = prCommits.filter( - (commit: { - committer: { login: string }; - commit: { message: string }; - }) => { - const isWebflow = - commit.committer && commit.committer.login === 'web-flow'; - if (!isWebflow) { - // Not a web UI commit, so keep it - return true; - } - const isUpdateBranch = - commit.commit && - commit.commit.message && - commit.commit.message.startsWith("Merge branch 'master' into"); - if (isUpdateBranch) { - // They just clicked the button - return false; - } - // They must have done some other edit through the web UI - return true; + logger.debug(`Ensuring content-only comment in #${issueNo}`); + body = `${content}`; + comments.forEach(comment => { + if (comment.body === body) { + commentId = comment.id; + commentNeedsUpdating = false; } + }); + } + if (!commentId) { + await addComment(issueNo, body); + logger.info( + { repository: config.repository, issueNo, topic }, + 'Comment added' ); - if (remainingCommits.length <= 1) { - pr.isModified = false; - } + } else if (commentNeedsUpdating) { + await editComment(commentId, body); + logger.info( + { repository: config.repository, issueNo }, + 'Comment updated' + ); + } else { 
+ logger.debug('Comment is already up-to-date');
}
- const baseCommitSHA = await getBaseCommitSHA();
- if (!pr.base || pr.base.sha !== baseCommitSHA) {
- pr.isStale = true;
+ return true;
+ } catch (err) /* istanbul ignore next */ {
+ if (err.message === 'platform-failure') {
+ throw err;
+ }
+ if (
+ err.message === 'Unable to create comment because issue is locked. (403)'
+ ) {
+ logger.info('Issue is locked - cannot add comment');
+ } else {
+ logger.warn({ err }, 'Error ensuring comment');
+ }
+ return false;
+ }
+}
+
+export async function ensureCommentRemoval(issueNo: number, topic: string) {
+ logger.debug(`Ensuring comment "${topic}" in #${issueNo} is removed`);
+ const comments = await getComments(issueNo);
+ let commentId;
+ comments.forEach(comment => {
+ if (comment.body.startsWith(`### ${topic}\n\n`)) {
+ commentId = comment.id;
+ }
+ });
+ try {
+ if (commentId) {
+ await deleteComment(commentId);
}
+ } catch (err) /* istanbul ignore next */ {
+ logger.warn({ err }, 'Error deleting comment');
+ }
+}
+
+// Pull Request
+
+// Creates PR and returns PR number
+export async function createPr(
+ branchName: string,
+ title: string,
+ rawBody: string,
+ labels: string[] | null,
+ useDefaultBranch: boolean,
+ platformOptions: { statusCheckVerify?: boolean } = {}
+) {
+ const body = sanitize(rawBody);
+ const base = useDefaultBranch ? config.defaultBranch : config.baseBranch;
+ // Include the repository owner to handle forkMode and regular mode
+ const head = `${config.repository!.split('/')[0]}:${branchName}`;
+ const options: any = {
+ body: {
+ title,
+ head,
+ base,
+ body,
+ },
+ };
+ // istanbul ignore if
+ if (config.forkToken) {
+ options.token = config.forkToken;
+ options.body.maintainer_can_modify = true;
+ }
+ logger.debug({ title, head, base }, 'Creating PR');
+ const pr = (await api.post<Pr>(
+ `repos/${config.parentRepo || config.repository}/pulls`,
+ options
+ )).body;
+ logger.debug({ branch: branchName, pr: pr.number }, 'PR created');
+ // istanbul ignore if
+ if (config.prList) {
+ config.prList.push(pr);
+ }
+ pr.displayNumber = `Pull Request #${pr.number}`;
+ pr.branchName = branchName;
+ await addLabels(pr.number, labels);
+ if (platformOptions.statusCheckVerify) {
+ logger.debug('Setting statusCheckVerify');
+ await setBranchStatus(
+ branchName,
+ `${appSlug}/verify`,
+ `${appName} verified pull request`,
+ 'success',
+ urls.homepage
+ );
}
+ pr.isModified = false;
return pr;
}
diff --git a/lib/platform/gitlab/index.ts b/lib/platform/gitlab/index.ts
index dddb023449ae0517ccd9b9ae6849ef9ee3fb7ea3..3c8cc0786245ec639e7321361ce37f1ecaeaa31e 100644
--- a/lib/platform/gitlab/index.ts
+++ b/lib/platform/gitlab/index.ts
@@ -227,13 +227,232 @@ export function getFileList(branchName = config.baseBranch) {
return config.storage.getFileList(branchName);
}
-// Branch
-
// Returns true if branch exists, otherwise false
export function branchExists(branchName: string) {
return config.storage.branchExists(branchName);
}
+// Returns the combined status for a branch.
+export async function getBranchStatus( + branchName: string, + requiredStatusChecks?: string[] | null +) { + logger.debug(`getBranchStatus(${branchName})`); + if (!requiredStatusChecks) { + // null means disable status checks, so it always succeeds + return 'success'; + } + if (Array.isArray(requiredStatusChecks) && requiredStatusChecks.length) { + // This is Unsupported + logger.warn({ requiredStatusChecks }, `Unsupported requiredStatusChecks`); + return 'failed'; + } + + if (!(await branchExists(branchName))) { + throw new Error('repository-changed'); + } + + // First, get the branch commit SHA + const branchSha = await config.storage.getBranchCommit(branchName); + // Now, check the statuses for that commit + const url = `projects/${config.repository}/repository/commits/${branchSha}/statuses`; + const res = await api.get(url, { paginate: true }); + logger.debug(`Got res with ${res.body.length} results`); + if (res.body.length === 0) { + // Return 'pending' if we have no status checks + return 'pending'; + } + let status = 'success'; + // Return 'success' if all are success + res.body.forEach((check: { status: string; allow_failure?: boolean }) => { + // If one is failed then don't overwrite that + if (status !== 'failure') { + if (!check.allow_failure) { + if (check.status === 'failed') { + status = 'failure'; + } else if (check.status !== 'success') { + ({ status } = check); + } + } + } + }); + return status; +} + +// Pull Request + +export async function createPr( + branchName: string, + title: string, + rawDescription: string, + labels?: string[] | null, + useDefaultBranch?: boolean, + platformOptions?: PlatformPrOptions +) { + const description = sanitize(rawDescription); + const targetBranch = useDefaultBranch + ? config.defaultBranch + : config.baseBranch; + logger.debug(`Creating Merge Request: ${title}`); + const res = await api.post(`projects/${config.repository}/merge_requests`, { + body: { + source_branch: branchName, + target_branch: targetBranch, + remove_source_branch: true, + title, + description, + labels: is.array(labels) ? labels.join(',') : null, + }, + }); + const pr = res.body; + pr.number = pr.iid; + pr.branchName = branchName; + pr.displayNumber = `Merge Request #${pr.iid}`; + pr.isModified = false; + // istanbul ignore if + if (config.prList) { + config.prList.push(pr); + } + if (platformOptions && platformOptions.gitLabAutomerge) { + try { + await api.put( + `projects/${config.repository}/merge_requests/${pr.iid}/merge`, + { + body: { + should_remove_source_branch: true, + merge_when_pipeline_succeeds: true, + }, + } + ); + } catch (err) /* istanbul ignore next */ { + logger.debug({ err }, 'Automerge on PR creation failed'); + } + } + + return pr; +} + +export async function getPr(iid: number) { + logger.debug(`getPr(${iid})`); + const url = `projects/${config.repository}/merge_requests/${iid}?include_diverged_commits_count=1`; + const pr = (await api.get(url)).body; + // Harmonize fields with GitHub + pr.branchName = pr.source_branch; + pr.targetBranch = pr.target_branch; + pr.number = pr.iid; + pr.displayNumber = `Merge Request #${pr.iid}`; + pr.body = pr.description; + pr.isStale = pr.diverged_commits_count > 0; + pr.state = pr.state === 'opened' ? 
'open' : pr.state; + pr.isModified = true; + if (pr.merge_status === 'cannot_be_merged') { + logger.debug('pr cannot be merged'); + pr.canMerge = false; + pr.isConflicted = true; + } else if (pr.state === 'open') { + const branchStatus = await getBranchStatus(pr.branchName, []); + if (branchStatus === 'success') { + pr.canMerge = true; + } + } + // Check if the most recent branch commit is by us + // If not then we don't allow it to be rebased, in case someone's changes would be lost + const branchUrl = `projects/${ + config.repository + }/repository/branches/${urlEscape(pr.source_branch)}`; + try { + const branch = (await api.get(branchUrl)).body; + const branchCommitEmail = + branch && branch.commit ? branch.commit.author_email : null; + // istanbul ignore if + if (branchCommitEmail === config.email) { + pr.isModified = false; + } else { + logger.debug( + { branchCommitEmail, configEmail: config.email, iid: pr.iid }, + 'Last committer to branch does not match bot email, so PR cannot be rebased.' + ); + pr.isModified = true; + } + } catch (err) { + logger.debug({ err }, 'Error getting PR branch'); + if (pr.state === 'open' || err.statusCode !== 404) { + logger.warn({ err }, 'Error getting PR branch'); + pr.isConflicted = true; + } + } + return pr; +} + +// Return a list of all modified files in a PR +export async function getPrFiles(mrNo: number) { + logger.debug({ mrNo }, 'getPrFiles'); + if (!mrNo) { + return []; + } + const files = (await api.get( + `projects/${config.repository}/merge_requests/${mrNo}/changes` + )).body.changes; + return files.map((f: { new_path: string }) => f.new_path); +} + +// istanbul ignore next +async function closePr(iid: number) { + await api.put(`projects/${config.repository}/merge_requests/${iid}`, { + body: { + state_event: 'close', + }, + }); +} + +export async function updatePr( + iid: number, + title: string, + description: string +) { + await api.put(`projects/${config.repository}/merge_requests/${iid}`, { + body: { + title, + description: sanitize(description), + }, + }); +} + +export async function mergePr(iid: number) { + try { + await api.put(`projects/${config.repository}/merge_requests/${iid}/merge`, { + body: { + should_remove_source_branch: true, + }, + }); + return true; + } catch (err) /* istanbul ignore next */ { + if (err.statusCode === 401) { + logger.info('No permissions to merge PR'); + return false; + } + if (err.statusCode === 406) { + logger.info('PR not acceptable for merging'); + return false; + } + logger.debug({ err }, 'merge PR error'); + logger.info('PR merge failed'); + return false; + } +} + +export function getPrBody(input: string) { + return smartTruncate( + input + .replace(/Pull Request/g, 'Merge Request') + .replace(/PR/g, 'MR') + .replace(/\]\(\.\.\/pull\//g, '](../merge_requests/'), + 1000000 + ); +} + +// Branch + // Returns the Pull Request for a branch. Null if not exists. export async function getBranchPr(branchName: string) { logger.debug(`getBranchPr(${branchName})`); @@ -312,53 +531,6 @@ export function getRepoStatus() { return config.storage.getRepoStatus(); } -// Returns the combined status for a branch. 
-export async function getBranchStatus( - branchName: string, - requiredStatusChecks?: string[] | null -) { - logger.debug(`getBranchStatus(${branchName})`); - if (!requiredStatusChecks) { - // null means disable status checks, so it always succeeds - return 'success'; - } - if (Array.isArray(requiredStatusChecks) && requiredStatusChecks.length) { - // This is Unsupported - logger.warn({ requiredStatusChecks }, `Unsupported requiredStatusChecks`); - return 'failed'; - } - - if (!(await branchExists(branchName))) { - throw new Error('repository-changed'); - } - - // First, get the branch commit SHA - const branchSha = await config.storage.getBranchCommit(branchName); - // Now, check the statuses for that commit - const url = `projects/${config.repository}/repository/commits/${branchSha}/statuses`; - const res = await api.get(url, { paginate: true }); - logger.debug(`Got res with ${res.body.length} results`); - if (res.body.length === 0) { - // Return 'pending' if we have no status checks - return 'pending'; - } - let status = 'success'; - // Return 'success' if all are success - res.body.forEach((check: { status: string; allow_failure?: boolean }) => { - // If one is failed then don't overwrite that - if (status !== 'failure') { - if (!check.allow_failure) { - if (check.status === 'failed') { - status = 'failure'; - } else if (check.status !== 'success') { - ({ status } = check); - } - } - } - }); - return status; -} - export async function getBranchStatusCheck( branchName: string, context: string @@ -692,178 +864,6 @@ export async function findPr( ); } -// Pull Request - -export async function createPr( - branchName: string, - title: string, - rawDescription: string, - labels?: string[] | null, - useDefaultBranch?: boolean, - platformOptions?: PlatformPrOptions -) { - const description = sanitize(rawDescription); - const targetBranch = useDefaultBranch - ? config.defaultBranch - : config.baseBranch; - logger.debug(`Creating Merge Request: ${title}`); - const res = await api.post(`projects/${config.repository}/merge_requests`, { - body: { - source_branch: branchName, - target_branch: targetBranch, - remove_source_branch: true, - title, - description, - labels: is.array(labels) ? labels.join(',') : null, - }, - }); - const pr = res.body; - pr.number = pr.iid; - pr.branchName = branchName; - pr.displayNumber = `Merge Request #${pr.iid}`; - pr.isModified = false; - // istanbul ignore if - if (config.prList) { - config.prList.push(pr); - } - if (platformOptions && platformOptions.gitLabAutomerge) { - try { - await api.put( - `projects/${config.repository}/merge_requests/${pr.iid}/merge`, - { - body: { - should_remove_source_branch: true, - merge_when_pipeline_succeeds: true, - }, - } - ); - } catch (err) /* istanbul ignore next */ { - logger.debug({ err }, 'Automerge on PR creation failed'); - } - } - - return pr; -} - -export async function getPr(iid: number) { - logger.debug(`getPr(${iid})`); - const url = `projects/${config.repository}/merge_requests/${iid}?include_diverged_commits_count=1`; - const pr = (await api.get(url)).body; - // Harmonize fields with GitHub - pr.branchName = pr.source_branch; - pr.targetBranch = pr.target_branch; - pr.number = pr.iid; - pr.displayNumber = `Merge Request #${pr.iid}`; - pr.body = pr.description; - pr.isStale = pr.diverged_commits_count > 0; - pr.state = pr.state === 'opened' ? 
'open' : pr.state; - pr.isModified = true; - if (pr.merge_status === 'cannot_be_merged') { - logger.debug('pr cannot be merged'); - pr.canMerge = false; - pr.isConflicted = true; - } else if (pr.state === 'open') { - const branchStatus = await getBranchStatus(pr.branchName, []); - if (branchStatus === 'success') { - pr.canMerge = true; - } - } - // Check if the most recent branch commit is by us - // If not then we don't allow it to be rebased, in case someone's changes would be lost - const branchUrl = `projects/${ - config.repository - }/repository/branches/${urlEscape(pr.source_branch)}`; - try { - const branch = (await api.get(branchUrl)).body; - const branchCommitEmail = - branch && branch.commit ? branch.commit.author_email : null; - // istanbul ignore if - if (branchCommitEmail === config.email) { - pr.isModified = false; - } else { - logger.debug( - { branchCommitEmail, configEmail: config.email, iid: pr.iid }, - 'Last committer to branch does not match bot email, so PR cannot be rebased.' - ); - pr.isModified = true; - } - } catch (err) { - logger.debug({ err }, 'Error getting PR branch'); - if (pr.state === 'open' || err.statusCode !== 404) { - logger.warn({ err }, 'Error getting PR branch'); - pr.isConflicted = true; - } - } - return pr; -} - -// Return a list of all modified files in a PR -export async function getPrFiles(mrNo: number) { - logger.debug({ mrNo }, 'getPrFiles'); - if (!mrNo) { - return []; - } - const files = (await api.get( - `projects/${config.repository}/merge_requests/${mrNo}/changes` - )).body.changes; - return files.map((f: { new_path: string }) => f.new_path); -} - -// istanbul ignore next -async function closePr(iid: number) { - await api.put(`projects/${config.repository}/merge_requests/${iid}`, { - body: { - state_event: 'close', - }, - }); -} - -export async function updatePr( - iid: number, - title: string, - description: string -) { - await api.put(`projects/${config.repository}/merge_requests/${iid}`, { - body: { - title, - description: sanitize(description), - }, - }); -} - -export async function mergePr(iid: number) { - try { - await api.put(`projects/${config.repository}/merge_requests/${iid}/merge`, { - body: { - should_remove_source_branch: true, - }, - }); - return true; - } catch (err) /* istanbul ignore next */ { - if (err.statusCode === 401) { - logger.info('No permissions to merge PR'); - return false; - } - if (err.statusCode === 406) { - logger.info('PR not acceptable for merging'); - return false; - } - logger.debug({ err }, 'merge PR error'); - logger.info('PR merge failed'); - return false; - } -} - -export function getPrBody(input: string) { - return smartTruncate( - input - .replace(/Pull Request/g, 'Merge Request') - .replace(/PR/g, 'MR') - .replace(/\]\(\.\.\/pull\//g, '](../merge_requests/'), - 1000000 - ); -} - export function getCommitMessages() { return config.storage.getCommitMessages(); } diff --git a/lib/versioning/cargo/index.ts b/lib/versioning/cargo/index.ts index 6446cbcb7a7257b159b434bedc61f1ab66b94af6..a33f78be123834b340f5f84eee6537ccca3224de 100644 --- a/lib/versioning/cargo/index.ts +++ b/lib/versioning/cargo/index.ts @@ -1,6 +1,8 @@ import { api as npm } from '../npm'; import { VersioningApi, RangeStrategy } from '../common'; +const isVersion = (input: string) => npm.isVersion(input); + function convertToCaret(item: string) { // In Cargo, "1.2.3" doesn't mean exactly 1.2.3, it means >= 1.2.3 < 2.0.0 if (isVersion(item)) { @@ -43,8 +45,6 @@ const isLessThanRange = (version: string, range: string) => export const 
isValid = (input: string) => npm.isValid(cargo2npm(input)); -const isVersion = (input: string) => npm.isVersion(input); - const matches = (version: string, range: string) => npm.matches(version, cargo2npm(range)); diff --git a/lib/versioning/index.ts b/lib/versioning/index.ts index 1f0655eae1516ec848102e36a17b8633c8827e65..09c0c5fe54cde6dfbcc010d4b0a4acca24b00ce6 100644 --- a/lib/versioning/index.ts +++ b/lib/versioning/index.ts @@ -18,9 +18,7 @@ for (const scheme of supportedSchemes) { schemes[scheme] = require('./' + scheme).api; // eslint-disable-line } -export { get }; - -function get(versionScheme: string): VersioningApi { +export function get(versionScheme: string): VersioningApi { if (!versionScheme) { logger.debug('Missing versionScheme'); return schemes.semver as VersioningApi; diff --git a/lib/versioning/loose/generic.ts b/lib/versioning/loose/generic.ts index 12eddcd7cd557fd91cb19980527187c3062e42f9..bec102da6293ed2e7fb4c02ca5e505b8f63baf4d 100644 --- a/lib/versioning/loose/generic.ts +++ b/lib/versioning/loose/generic.ts @@ -12,25 +12,6 @@ export interface VersionComparator { (version: string, other: string): number; } -// helper functions to ease create other versioning schemas with little code -// especially if those schemas do not support ranges -export const create = ({ - parse, - compare, -}: { - parse: VersionParser; - compare: VersionComparator; -}) => { - let schema: VersioningApi = {} as any; - if (parse) { - schema = { ...schema, ...parser(parse) }; - } - if (compare) { - schema = { ...schema, ...comparer(compare) }; - } - return schema; -}; - // since this file was meant for no range support, a range = version // parse should return null if version not valid // parse should return an object with property release, an array of version sections major.minor.patch @@ -119,6 +100,25 @@ export const comparer = ( }; }; +// helper functions to ease create other versioning schemas with little code +// especially if those schemas do not support ranges +export const create = ({ + parse, + compare, +}: { + parse: VersionParser; + compare: VersionComparator; +}) => { + let schema: VersioningApi = {} as any; + if (parse) { + schema = { ...schema, ...parser(parse) }; + } + if (compare) { + schema = { ...schema, ...comparer(compare) }; + } + return schema; +}; + export abstract class GenericVersioningApi< T extends GenericVersion = GenericVersion > implements VersioningApi { diff --git a/lib/versioning/maven/compare.ts b/lib/versioning/maven/compare.ts index 46534840ad01af7dfa7f239eb9803c7769f93334..32af458aa7686a517484fb62e5c2184b98c58e38 100644 --- a/lib/versioning/maven/compare.ts +++ b/lib/versioning/maven/compare.ts @@ -267,25 +267,9 @@ function isVersion(version: string) { return !!tokens.length; } -function isValid(str: string) { - if (!str) { - return false; - } - return isVersion(str) || !!parseRange(str); -} - const INCLUDING_POINT = 'INCLUDING_POINT'; const EXCLUDING_POINT = 'EXCLUDING_POINT'; -export interface Range { - leftType: typeof INCLUDING_POINT | typeof EXCLUDING_POINT; - leftValue: string; - leftBracket: string; - rightType: typeof INCLUDING_POINT | typeof EXCLUDING_POINT; - rightValue: string; - rightBracket: string; -} - function parseRange(rangeStr: string) { function emptyInterval(): Range { return { @@ -383,6 +367,22 @@ function parseRange(rangeStr: string) { ); } +function isValid(str: string) { + if (!str) { + return false; + } + return isVersion(str) || !!parseRange(str); +} + +export interface Range { + leftType: typeof INCLUDING_POINT | typeof 
EXCLUDING_POINT; + leftValue: string; + leftBracket: string; + rightType: typeof INCLUDING_POINT | typeof EXCLUDING_POINT; + rightValue: string; + rightBracket: string; +} + function rangeToStr(fullRange: Range[]): string | null { if (fullRange === null) return null; diff --git a/lib/versioning/npm/range.ts b/lib/versioning/npm/range.ts index 4bf0bef94b885a8042af7a95966ab1e8a3c6ac2c..92a3ef86bfe4f875cded4476b8aa8262ff77b861 100644 --- a/lib/versioning/npm/range.ts +++ b/lib/versioning/npm/range.ts @@ -11,9 +11,7 @@ import { parseRange } from 'semver-utils'; import { logger } from '../../logger'; import { RangeStrategy } from '../common'; -export { getNewValue }; - -function getNewValue( +export function getNewValue( currentValue: string, rangeStrategy: RangeStrategy, fromVersion: string, diff --git a/lib/versioning/pep440/range.ts b/lib/versioning/pep440/range.ts index cf9aad9bf627c5d5f287f7de3f56b976ae5c8f88..309940718469232ee4615c40a205d5b719736d5f 100644 --- a/lib/versioning/pep440/range.ts +++ b/lib/versioning/pep440/range.ts @@ -4,8 +4,6 @@ import { parse as parseRange } from '@renovate/pep440/lib/specifier'; import { logger } from '../../logger'; import { RangeStrategy } from '../common'; -export { getNewValue }; - function getFutureVersion( baseVersion: string, toVersion: string, @@ -37,7 +35,7 @@ interface Range { version: string; } -function getNewValue( +export function getNewValue( currentValue: string, rangeStrategy: RangeStrategy, fromVersion: string, diff --git a/lib/versioning/poetry/index.ts b/lib/versioning/poetry/index.ts index 5ca4e130c1142b0278f3dc183974b980606eab9c..6964d671d15276a05e2c31276fe82854e6b97b94 100644 --- a/lib/versioning/poetry/index.ts +++ b/lib/versioning/poetry/index.ts @@ -62,6 +62,25 @@ const isSingleVersion = (constraint: string) => )) || isVersion(constraint.trim()); +function handleShort( + operator: string, + currentValue: string, + toVersion: string +) { + const toVersionMajor = major(toVersion); + const toVersionMinor = minor(toVersion); + const split = currentValue.split('.'); + if (split.length === 1) { + // [^,~]4 + return operator + toVersionMajor; + } + if (split.length === 2) { + // [^,~]4.1 + return operator + toVersionMajor + '.' + toVersionMinor; + } + return null; +} + function getNewValue( currentValue: string, rangeStrategy: RangeStrategy, @@ -97,25 +116,6 @@ function getNewValue( return newPoetry; } -function handleShort( - operator: string, - currentValue: string, - toVersion: string -) { - const toVersionMajor = major(toVersion); - const toVersionMinor = minor(toVersion); - const split = currentValue.split('.'); - if (split.length === 1) { - // [^,~]4 - return operator + toVersionMajor; - } - if (split.length === 2) { - // [^,~]4.1 - return operator + toVersionMajor + '.' + toVersionMinor; - } - return null; -} - export const api: VersioningApi = { ...npm, getNewValue, diff --git a/lib/versioning/regex/index.ts b/lib/versioning/regex/index.ts index 7ae233e300b6994305f719e9fb0df087527a4e14..94395785f4ba2412d67fc299f5c82a5069ba19a6 100644 --- a/lib/versioning/regex/index.ts +++ b/lib/versioning/regex/index.ts @@ -13,6 +13,15 @@ export interface RegExpVersion extends GenericVersion { compatibility: string; } +// convenience method for passing a Version object into any semver.* method. 
+function asSemver(version: RegExpVersion): string { + let vstring = `${version.release[0]}.${version.release[1]}.${version.release[2]}`; + if (typeof version.prerelease !== 'undefined') { + vstring += `-${version.prerelease}`; + } + return vstring; +} + export class RegExpVersioningApi extends GenericVersioningApi<RegExpVersion> { // config is expected to be overridden by a user-specified RegExp value // sample values: @@ -112,15 +121,6 @@ export class RegExpVersioningApi extends GenericVersioningApi<RegExpVersion> { } } -// convenience method for passing a Version object into any semver.* method. -function asSemver(version: RegExpVersion): string { - let vstring = `${version.release[0]}.${version.release[1]}.${version.release[2]}`; - if (typeof version.prerelease !== 'undefined') { - vstring += `-${version.prerelease}`; - } - return vstring; -} - export const api: VersioningApiConstructor = RegExpVersioningApi; export default api; diff --git a/lib/workers/branch/index.ts b/lib/workers/branch/index.ts index 8123ee46ebe764d53a5db14ee34db422c69bf8c9..b4dcdc07a8138612f0ced83a84051b367b1a1bf2 100644 --- a/lib/workers/branch/index.ts +++ b/lib/workers/branch/index.ts @@ -42,6 +42,17 @@ export type ProcessBranchResult = | 'pr-edited' | 'pr-hourly-limit-reached'; +// TODO: proper typings +function rebaseCheck(config: RenovateConfig, branchPr: any): boolean { + const titleRebase = branchPr.title && branchPr.title.startsWith('rebase!'); + const labelRebase = + branchPr.labels && branchPr.labels.includes(config.rebaseLabel); + const prRebaseChecked = + branchPr.body && branchPr.body.includes(`- [x] <!-- ${appSlug}-rebase -->`); + + return titleRebase || labelRebase || prRebaseChecked; +} + export async function processBranch( branchConfig: BranchConfig, prHourlyLimitReached?: boolean, @@ -555,14 +566,3 @@ export async function processBranch( } return 'done'; } - -// TODO: proper typings -function rebaseCheck(config: RenovateConfig, branchPr: any): boolean { - const titleRebase = branchPr.title && branchPr.title.startsWith('rebase!'); - const labelRebase = - branchPr.labels && branchPr.labels.includes(config.rebaseLabel); - const prRebaseChecked = - branchPr.body && branchPr.body.includes(`- [x] <!-- ${appSlug}-rebase -->`); - - return titleRebase || labelRebase || prRebaseChecked; -} diff --git a/lib/workers/global/limits.ts b/lib/workers/global/limits.ts index 3e97cf8d719a966ce3d389222a0dafd4a91e49e7..ed2f2cf670d952ffba3bfdd3bba30126422d7525 100644 --- a/lib/workers/global/limits.ts +++ b/lib/workers/global/limits.ts @@ -4,26 +4,26 @@ const limitsToInit = ['prCommitsPerRunLimit']; const l: Record<string, number> = {}; const v: Record<string, number> = {}; +export function setLimit(name: string, value: number) { + logger.debug(`Limits.setLimit l[${name}] = ${value}`); + l[name] = value; +} + export function init(config: Record<string, any>) { - logger.info(`Limits.init enter method`); + logger.debug(`Limits.init enter method`); for (const limit of limitsToInit) { - logger.info(`Limits.init ${limit} processing`); + logger.debug(`Limits.init ${limit} processing`); if (config[limit]) { setLimit(limit, config[limit]); v[limit] = 0; } else { - logger.info( + logger.debug( `Limits.init ${limit} variable is not set. 
Ignoring ${limit}` ); } } } -export function setLimit(name: string, value: number) { - logger.debug(`Limits.setLimit l[${name}] = ${value}`); - l[name] = value; -} - export function getLimitRemaining(name: string) { let result; if (typeof v[name] !== 'undefined') { diff --git a/lib/workers/repository/process/lookup/index.ts b/lib/workers/repository/process/lookup/index.ts index f2e4235334ec3e50546e8f5a92a6d897d24b6040..6a9539373d1f8f02242c1341ab8632c6423e97cf 100644 --- a/lib/workers/repository/process/lookup/index.ts +++ b/lib/workers/repository/process/lookup/index.ts @@ -50,6 +50,84 @@ export interface LookupUpdateConfig separateMultipleMajor?: boolean; } +function getType( + config: LookupUpdateConfig, + fromVersion: string, + toVersion: string +): string { + const { versionScheme, rangeStrategy, currentValue } = config; + const version = versioning.get(versionScheme); + if (rangeStrategy === 'bump' && version.matches(toVersion, currentValue)) { + return 'bump'; + } + if (version.getMajor(toVersion) > version.getMajor(fromVersion)) { + return 'major'; + } + if (version.getMinor(toVersion) > version.getMinor(fromVersion)) { + return 'minor'; + } + if (config.separateMinorPatch) { + return 'patch'; + } + if (config.patch.automerge && !config.minor.automerge) { + return 'patch'; + } + return 'minor'; +} + +function getFromVersion( + config: LookupUpdateConfig, + rangeStrategy: string, + latestVersion: string, + allVersions: string[] +): string | null { + const { currentValue, lockedVersion, versionScheme } = config; + const version = versioning.get(versionScheme); + if (version.isVersion(currentValue)) { + return currentValue; + } + if (version.isSingleVersion(currentValue)) { + return currentValue.replace(/=/g, '').trim(); + } + logger.trace(`currentValue ${currentValue} is range`); + let useVersions = allVersions.filter(v => version.matches(v, currentValue)); + if (latestVersion && version.matches(latestVersion, currentValue)) { + useVersions = useVersions.filter( + v => !version.isGreaterThan(v, latestVersion) + ); + } + if (rangeStrategy === 'pin') { + return ( + lockedVersion || version.maxSatisfyingVersion(useVersions, currentValue) + ); + } + if (rangeStrategy === 'bump') { + // Use the lowest version in the current range + return version.minSatisfyingVersion(useVersions, currentValue); + } + // Use the highest version in the current range + return version.maxSatisfyingVersion(useVersions, currentValue); +} + +function getBucket(config: LookupUpdateConfig, update: LookupUpdate) { + const { separateMajorMinor, separateMultipleMajor } = config; + const { updateType, newMajor } = update; + if (updateType === 'lockfileUpdate') { + return updateType; + } + if ( + !separateMajorMinor || + config.major.automerge === true || + (config.automerge && config.major.automerge !== false) + ) { + return 'latest'; + } + if (separateMultipleMajor && updateType === 'major') { + return `major-${newMajor}`; + } + return updateType; +} + export async function lookupUpdates( config: LookupUpdateConfig ): Promise<UpdateResult> { @@ -339,81 +417,3 @@ export async function lookupUpdates( } return res; } - -function getType( - config: LookupUpdateConfig, - fromVersion: string, - toVersion: string -): string { - const { versionScheme, rangeStrategy, currentValue } = config; - const version = versioning.get(versionScheme); - if (rangeStrategy === 'bump' && version.matches(toVersion, currentValue)) { - return 'bump'; - } - if (version.getMajor(toVersion) > version.getMajor(fromVersion)) { - return 'major'; - } 
- if (version.getMinor(toVersion) > version.getMinor(fromVersion)) { - return 'minor'; - } - if (config.separateMinorPatch) { - return 'patch'; - } - if (config.patch.automerge && !config.minor.automerge) { - return 'patch'; - } - return 'minor'; -} - -function getBucket(config: LookupUpdateConfig, update: LookupUpdate) { - const { separateMajorMinor, separateMultipleMajor } = config; - const { updateType, newMajor } = update; - if (updateType === 'lockfileUpdate') { - return updateType; - } - if ( - !separateMajorMinor || - config.major.automerge === true || - (config.automerge && config.major.automerge !== false) - ) { - return 'latest'; - } - if (separateMultipleMajor && updateType === 'major') { - return `major-${newMajor}`; - } - return updateType; -} - -function getFromVersion( - config: LookupUpdateConfig, - rangeStrategy: string, - latestVersion: string, - allVersions: string[] -): string | null { - const { currentValue, lockedVersion, versionScheme } = config; - const version = versioning.get(versionScheme); - if (version.isVersion(currentValue)) { - return currentValue; - } - if (version.isSingleVersion(currentValue)) { - return currentValue.replace(/=/g, '').trim(); - } - logger.trace(`currentValue ${currentValue} is range`); - let useVersions = allVersions.filter(v => version.matches(v, currentValue)); - if (latestVersion && version.matches(latestVersion, currentValue)) { - useVersions = useVersions.filter( - v => !version.isGreaterThan(v, latestVersion) - ); - } - if (rangeStrategy === 'pin') { - return ( - lockedVersion || version.maxSatisfyingVersion(useVersions, currentValue) - ); - } - if (rangeStrategy === 'bump') { - // Use the lowest version in the current range - return version.minSatisfyingVersion(useVersions, currentValue); - } - // Use the highest version in the current range - return version.maxSatisfyingVersion(useVersions, currentValue); -} diff --git a/lib/workers/repository/updates/generate.ts b/lib/workers/repository/updates/generate.ts index d77afc91f1fc68d9205c67859bd6d326f9e10949..54b702c57237bb6784cf1417b8cc322c32e52048 100644 --- a/lib/workers/repository/updates/generate.ts +++ b/lib/workers/repository/updates/generate.ts @@ -23,6 +23,42 @@ function ifTypesGroup( ); } +function getTableValues( + upgrade: PackageDependency & ManagerConfig +): [string, string, string, string] | null { + if (!upgrade.commitBodyTable) { + return null; + } + const { + datasource, + lookupName, + depName, + fromVersion, + toVersion, + displayFrom, + displayTo, + } = upgrade; + const name = lookupName || depName; + const from = fromVersion || displayFrom; + const to = toVersion || displayTo; + if (datasource && name && from && to) { + return [datasource, name, from, to]; + } + logger.debug( + { + datasource, + lookupName, + depName, + fromVersion, + toVersion, + displayFrom, + displayTo, + }, + 'Cannot determine table values' + ); + return null; +} + export function generateBranchConfig(branchUpgrades) { logger.debug(`generateBranchConfig(${branchUpgrades.length})`); logger.trace({ config: branchUpgrades }); @@ -290,39 +326,3 @@ export function generateBranchConfig(branchUpgrades) { } return config; } - -function getTableValues( - upgrade: PackageDependency & ManagerConfig -): [string, string, string, string] | null { - if (!upgrade.commitBodyTable) { - return null; - } - const { - datasource, - lookupName, - depName, - fromVersion, - toVersion, - displayFrom, - displayTo, - } = upgrade; - const name = lookupName || depName; - const from = fromVersion || displayFrom; - const to 
= toVersion || displayTo; - if (datasource && name && from && to) { - return [datasource, name, from, to]; - } - logger.debug( - { - datasource, - lookupName, - depName, - fromVersion, - toVersion, - displayFrom, - displayTo, - }, - 'Cannot determine table values' - ); - return null; -} diff --git a/test/datasource/maven.spec.ts b/test/datasource/maven.spec.ts index 734a4e458713480b0e6fedcec48a4e4eda9700f1..7581c3ab362d6180a41d470b491dd8fd8e71dc46 100644 --- a/test/datasource/maven.spec.ts +++ b/test/datasource/maven.spec.ts @@ -85,6 +85,10 @@ describe('datasource/maven', () => { nock.enableNetConnect(); }); + function generateReleases(versions) { + return versions.map(v => ({ version: v })); + } + describe('getPkgReleases', () => { it('should return empty if library is not found', async () => { const releases = await datasource.getPkgReleases({ @@ -277,7 +281,3 @@ describe('datasource/maven', () => { }); }); }); - -function generateReleases(versions) { - return versions.map(v => ({ version: v })); -} diff --git a/test/platform/azure/index.spec.ts b/test/platform/azure/index.spec.ts index 9215c91b3c7dbbcdc3458ed56fb67baf4e595c66..4c9640a826d26397794463e4283eb717318cf0f2 100644 --- a/test/platform/azure/index.spec.ts +++ b/test/platform/azure/index.spec.ts @@ -113,18 +113,6 @@ describe('platform/azure', () => { }); }); - describe('getRepoStatus()', () => { - it('exists', async () => { - await initRepo(); - expect(await azure.getRepoStatus()).toBeUndefined(); - }); - }); - - describe('cleanRepo()', () => { - it('exists', () => { - azure.cleanRepo(); - }); - }); function initRepo(args?: Partial<RepoParams> | string) { azureApi.gitApi.mockImplementationOnce( () => @@ -172,6 +160,19 @@ describe('platform/azure', () => { } as any); } + describe('getRepoStatus()', () => { + it('exists', async () => { + await initRepo(); + expect(await azure.getRepoStatus()).toBeUndefined(); + }); + }); + + describe('cleanRepo()', () => { + it('exists', () => { + azure.cleanRepo(); + }); + }); + describe('initRepo', () => { it(`should initialise the config for a repo`, async () => { const config = await initRepo({ diff --git a/test/platform/gitlab/index.spec.ts b/test/platform/gitlab/index.spec.ts index 12c8056936a906475f365d605e6ff121b7a6753c..1b13516bda3265b109b0ed79fd97a79d2f6f7c5b 100644 --- a/test/platform/gitlab/index.spec.ts +++ b/test/platform/gitlab/index.spec.ts @@ -125,17 +125,6 @@ describe('platform/gitlab', () => { expect(repos).toMatchSnapshot(); }); }); - describe('getRepoStatus()', () => { - it('exists', async () => { - await initRepo(); - await gitlab.getRepoStatus(); - }); - }); - describe('cleanRepo()', () => { - it('exists', () => { - gitlab.cleanRepo(); - }); - }); function initRepo(args?: any) { // projects/${config.repository} api.get.mockImplementationOnce( @@ -166,6 +155,17 @@ describe('platform/gitlab', () => { optimizeForDisabled: false, }); } + describe('getRepoStatus()', () => { + it('exists', async () => { + await initRepo(); + await gitlab.getRepoStatus(); + }); + }); + describe('cleanRepo()', () => { + it('exists', () => { + gitlab.cleanRepo(); + }); + }); describe('initRepo', () => { it(`should throw error if disabled in renovate.json`, async () => {