From 4539487d26cf80f64fc3a7b3c2ee71d23fda738d Mon Sep 17 00:00:00 2001
From: Rhys Arkins <rhys@arkins.net>
Date: Wed, 5 Sep 2018 14:18:31 +0200
Subject: [PATCH] refactor: less logging

---
 lib/manager/dockerfile/extract.js        | 2 +-
 lib/platform/github/gh-got-wrapper.js    | 2 +-
 lib/platform/github/storage.js           | 4 ++--
 lib/platform/gitlab/gl-got-wrapper.js    | 2 +-
 lib/workers/branch/index.js              | 2 +-
 lib/workers/branch/parent.js             | 2 +-
 lib/workers/global/cache.js              | 8 ++++----
 lib/workers/pr/index.js                  | 6 +++---
 lib/workers/repository/init/config.js    | 2 +-
 lib/workers/repository/init/semantic.js  | 4 ++--
 lib/workers/repository/process/fetch.js  | 2 +-
 lib/workers/repository/process/limits.js | 4 ++--
 lib/workers/repository/process/write.js  | 5 ++++-
 13 files changed, 24 insertions(+), 21 deletions(-)

diff --git a/lib/manager/dockerfile/extract.js b/lib/manager/dockerfile/extract.js
index 3950da0cd6..4d5bcbf51d 100644
--- a/lib/manager/dockerfile/extract.js
+++ b/lib/manager/dockerfile/extract.js
@@ -86,7 +86,7 @@ function extractDependencies(content) {
       logger.debug({ currentFrom }, 'Skipping alias FROM');
     } else {
       const dep = getDep(currentFrom);
-      logger.info(
+      logger.debug(
         {
           depName: dep.depName,
           currentTag: dep.currentTag,
diff --git a/lib/platform/github/gh-got-wrapper.js b/lib/platform/github/gh-got-wrapper.js
index 21e49bfb59..878b73af98 100644
--- a/lib/platform/github/gh-got-wrapper.js
+++ b/lib/platform/github/gh-got-wrapper.js
@@ -19,7 +19,7 @@ async function get(path, options, retries = 5) {
     logger.trace({ path }, 'Returning cached result');
     return cache[path];
   }
-  logger.debug(`${method.toUpperCase()} ${path}`);
+  logger.trace(`${method.toUpperCase()} ${path}`);
   stats.requests = (stats.requests || []).concat([
     method.toUpperCase() + ' ' + path.replace(opts.endpoint, ''),
   ]);
diff --git a/lib/platform/github/storage.js b/lib/platform/github/storage.js
index 0bbcd22112..8e64e26e7b 100644
--- a/lib/platform/github/storage.js
+++ b/lib/platform/github/storage.js
@@ -280,11 +280,11 @@ class Storage {
     try {
       if (isBranchExisting) {
         await updateBranch(branchName, commit);
-        logger.debug({ branchName }, 'Branch updated');
+        logger.info({ branchName }, 'Branch updated');
         return 'updated';
       }
       await createBranch(branchName, commit);
-      logger.debug({ branchName }, 'Branch created');
+      logger.info({ branchName }, 'Branch created');
       return 'created';
     } catch (err) /* istanbul ignore next */ {
       logger.debug({
diff --git a/lib/platform/gitlab/gl-got-wrapper.js b/lib/platform/gitlab/gl-got-wrapper.js
index 370545fa38..959062d8bd 100644
--- a/lib/platform/gitlab/gl-got-wrapper.js
+++ b/lib/platform/gitlab/gl-got-wrapper.js
@@ -16,7 +16,7 @@ async function get(path, options, retries = 5) {
   const method = opts.method || 'get';
   const useCache = opts.useCache || true;
   if (method === 'get' && useCache && cache[path]) {
-    logger.debug({ path }, 'Returning cached result');
+    logger.trace({ path }, 'Returning cached result');
     return cache[path];
   }
   logger.debug({ path }, method.toUpperCase());
diff --git a/lib/workers/branch/index.js b/lib/workers/branch/index.js
index 0a4e718d01..cbd6f315dd 100644
--- a/lib/workers/branch/index.js
+++ b/lib/workers/branch/index.js
@@ -38,7 +38,7 @@ async function processBranch(branchConfig, packageFiles) {
     return 'pr-hourly-limit-reached';
   }
   try {
-    logger.info(
+    logger.debug(
       `Branch has ${dependencies ? dependencies.length : 0} upgrade(s)`
     );
diff --git a/lib/workers/branch/parent.js b/lib/workers/branch/parent.js
index bbfd86af62..a6b7e5a620 100644
--- a/lib/workers/branch/parent.js
+++ b/lib/workers/branch/parent.js
@@ -10,7 +10,7 @@ async function getParentBranch(config) {
     logger.info(`Branch needs creating`);
     return { parentBranch: undefined };
   }
-  logger.info(`Branch already exists`);
+  logger.debug(`Branch already exists`);
   // Check for existing PR
   const pr = await platform.getBranchPr(branchName);
diff --git a/lib/workers/global/cache.js b/lib/workers/global/cache.js
index ba8ee941be..075d01cdce 100644
--- a/lib/workers/global/cache.js
+++ b/lib/workers/global/cache.js
@@ -19,20 +19,20 @@ async function get(namespace, key) {
     const cachedValue = JSON.parse(res.data.toString());
     if (cachedValue) {
       if (DateTime.local() < DateTime.fromISO(cachedValue.expiry)) {
-        logger.debug({ namespace, key }, 'Returning cached value');
+        logger.trace({ namespace, key }, 'Returning cached value');
         return cachedValue.value;
       }
       // istanbul ignore next
       await rm(namespace, key);
     }
   } catch (err) {
-    logger.debug({ namespace, key }, 'Cache miss');
+    logger.trace({ namespace, key }, 'Cache miss');
   }
   return null;
 }

 async function set(namespace, key, value, ttlMinutes = 5) {
-  logger.debug({ namespace, key, ttlMinutes }, 'Saving cached value');
+  logger.trace({ namespace, key, ttlMinutes }, 'Saving cached value');
   await cacache.put(
     renovateCache,
     getKey(namespace, key),
@@ -45,7 +45,7 @@

 // istanbul ignore next
 async function rm(namespace, key) {
-  logger.debug({ namespace, key }, 'Removing cache entry');
+  logger.trace({ namespace, key }, 'Removing cache entry');
   await cacache.rm.entry(renovateCache, getKey(namespace, key));
 }
diff --git a/lib/workers/pr/index.js b/lib/workers/pr/index.js
index 73d72079d8..0d29079279 100644
--- a/lib/workers/pr/index.js
+++ b/lib/workers/pr/index.js
@@ -237,7 +237,7 @@ async function ensurePr(prConfig) {
       }
       existingPrBody = existingPrBody.trim();
       if (existingPr.title === prTitle && existingPrBody === prBody) {
-        logger.info(`${existingPr.displayNumber} does not need updating`);
+        logger.debug(`${existingPr.displayNumber} does not need updating`);
         return existingPr;
       }
       // PR must need updating
@@ -378,7 +378,7 @@ async function checkAutoMerge(pr, config) {
     `Checking #${pr.number} for automerge`
   );
   if (automerge) {
-    logger.info('PR is configured for automerge');
+    logger.debug('PR is configured for automerge');
     // Return if PR not ready for automerge
     if (pr.isUnmergeable) {
       logger.info('PR is not mergeable');
@@ -396,7 +396,7 @@
     );
     logger.debug(`branchStatus=${branchStatus}`);
     if (branchStatus !== 'success') {
-      logger.info('Branch status is not "success"');
+      logger.debug('Branch status is not "success"');
       return false;
     }
     // Check if it's been touched
diff --git a/lib/workers/repository/init/config.js b/lib/workers/repository/init/config.js
index 51ea51b136..00675882a0 100644
--- a/lib/workers/repository/init/config.js
+++ b/lib/workers/repository/init/config.js
@@ -96,7 +96,7 @@ async function mergeRenovateConfig(config) {
       error.validationMessage = 'JSON.parse error: ' + err.message;
       throw error;
     }
-    logger.info({ config: renovateJson }, 'renovate.json config');
+    logger.info({ configFile, config: renovateJson }, 'Repository config');
   }
   const migratedConfig = await migrateAndValidate(config, renovateJson);
   if (migratedConfig.errors.length) {
diff --git a/lib/workers/repository/init/semantic.js b/lib/workers/repository/init/semantic.js
index 5297c160b4..942f52836c 100644
--- a/lib/workers/repository/init/semantic.js
+++ b/lib/workers/repository/init/semantic.js
@@ -18,10 +18,10 @@ async function detectSemanticCommits(config) {
   const type = conventionalCommitsDetector(commitMessages);
   logger.debug('Semantic commits detection: ' + type);
   if (type === 'angular') {
-    logger.info('angular semantic commits detected');
+    logger.debug('angular semantic commits detected');
     return true;
   }
-  logger.info('No semantic commits detected');
+  logger.debug('No semantic commits detected');
   return false;
 }
diff --git a/lib/workers/repository/process/fetch.js b/lib/workers/repository/process/fetch.js
index f7662b8ef4..7cc9e9f90a 100644
--- a/lib/workers/repository/process/fetch.js
+++ b/lib/workers/repository/process/fetch.js
@@ -51,7 +51,7 @@ async function fetchDepUpdates(packageFileConfig, dep) {
   }
   // istanbul ignore if
   if (dep.updates.length) {
-    logger.info(
+    logger.debug(
       { dependency: depName },
       `${dep.updates.length} result(s): ${dep.updates.map(
         upgrade => upgrade.newValue
diff --git a/lib/workers/repository/process/limits.js b/lib/workers/repository/process/limits.js
index 1ca9a18db2..69537f28fd 100644
--- a/lib/workers/repository/process/limits.js
+++ b/lib/workers/repository/process/limits.js
@@ -19,7 +19,7 @@ async function getPrHourlyRemaining(config) {
       moment(pr.createdAt).isAfter(currentHourStart)
     ).length;
     const prsRemaining = config.prHourlyLimit - soFarThisHour;
-    logger.info(`PR hourly limit remaining: ${prsRemaining}`);
+    logger.debug(`PR hourly limit remaining: ${prsRemaining}`);
     return prsRemaining;
   } catch (err) {
     logger.error('Error checking PRs created per hour');
@@ -39,7 +39,7 @@ async function getConcurrentPrsRemaining(config, branches) {
     }
     logger.debug(`${currentlyOpen} PRs are currently open`);
     const concurrentRemaining = config.prConcurrentLimit - currentlyOpen;
-    logger.info(`PR concurrent limit remaining: ${concurrentRemaining}`);
+    logger.debug(`PR concurrent limit remaining: ${concurrentRemaining}`);
     return concurrentRemaining;
   }
   return 99;
diff --git a/lib/workers/repository/process/write.js b/lib/workers/repository/process/write.js
index 267db0dcdd..d9beae7e4a 100644
--- a/lib/workers/repository/process/write.js
+++ b/lib/workers/repository/process/write.js
@@ -9,7 +9,10 @@ module.exports = {

 async function writeUpdates(config, packageFiles, allBranches) {
   let branches = allBranches;
-  logger.info(`Processing ${branches.length} branch(es)`);
+  logger.info(
+    { branchList: branches.map(b => b.branchName).sort() },
+    `Processing ${branches.length} branch${branches.length && 'es'}`
+  );
   if (!config.mirrorMode) {
     branches = branches.filter(branchConfig => {
       if (branchConfig.blockedByPin) {
--
GitLab
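
For context, a minimal sketch of the log-level split the changes above apply: routine internals (cache hits, raw HTTP calls) drop to trace, per-dependency bookkeeping drops to debug, and repository-changing events such as branch creation stay at info. This is not code from the patch; it assumes a bunyan-style logger (the kind Renovate wraps), and the logger name and field values are placeholders.

const bunyan = require('bunyan');

// Placeholder logger; level 'trace' is set here only so all three calls are emitted.
const logger = bunyan.createLogger({ name: 'renovate-sketch', level: 'trace' });

// High-volume internals such as cache hits and raw HTTP requests: trace only.
logger.trace({ path: '/repos/some/repo' }, 'Returning cached result');

// Per-dependency detail, useful when debugging a single repository: debug.
logger.debug({ depName: 'node' }, 'Dockerfile image found');

// Events that change the repository, such as creating a branch: keep at info.
logger.info({ branchName: 'renovate/some-dep-1.x' }, 'Branch created');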