[tor-commits] [metrics-web/release] Replace Java logging with slf4j.
commit 406d0e93210d1839390d58205b05fb5748531b2a
Author: Karsten Loesing <karsten.loesing@xxxxxxx>
Date: Tue Aug 21 20:47:33 2018 +0200
Replace Java logging with slf4j.
Fixes the last part of #26190.
---
.../metrics/stats/servers/Configuration.java | 37 ++++----
.../stats/servers/ConsensusStatsFileHandler.java | 81 ++++++++----------
.../torproject/metrics/stats/servers/LockFile.java | 18 ++--
.../stats/servers/LoggingConfiguration.java | 98 ----------------------
.../org/torproject/metrics/stats/servers/Main.java | 16 ++--
.../servers/RelayDescriptorDatabaseImporter.java | 86 ++++++++-----------
6 files changed, 108 insertions(+), 228 deletions(-)
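[Editorial note, not part of the commit: the same mechanical pattern repeats in every file touched below. Each class drops its per-instance java.util.logging.Logger in favor of a single static slf4j logger, and string concatenation in log calls gives way to {} placeholders. In the hunks that follow, WARNING calls become warn, FINE and FINER become debug, and SEVERE becomes error (or, in a few places, warn). A minimal before/after sketch of that pattern, using a hypothetical Example class that does not appear in this commit:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;

public class Example {

  /* After the migration: one static slf4j logger per class, instead of a
   * per-instance logger obtained via
   * Logger.getLogger(Example.class.getName()). */
  private static Logger log = LoggerFactory.getLogger(Example.class);

  /** Reads a file, logging failures the way the migrated classes do. */
  public void readConfig(File configFile) {
    try {
      Files.readAllLines(configFile.toPath());
      log.debug("Finished reading file {}.", configFile.getAbsolutePath());
    } catch (IOException e) {
      /* Parameterized message; slf4j (1.6 and later) still prints the
       * stack trace of a trailing Throwable, replacing the old
       * logger.log(Level.WARNING, "Failed to read " + path + "!", e). */
      log.warn("Failed to read file {}!", configFile.getAbsolutePath(), e);
    }
  }
}

Note that slf4j-api only defines the logging facade; an slf4j binding such as logback must be on the classpath at runtime for these messages to appear, which replaces the hand-rolled handler setup removed in LoggingConfiguration.java below.]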
diff --git a/src/main/java/org/torproject/metrics/stats/servers/Configuration.java b/src/main/java/org/torproject/metrics/stats/servers/Configuration.java
index 5be8d88..8435b90 100644
--- a/src/main/java/org/torproject/metrics/stats/servers/Configuration.java
+++ b/src/main/java/org/torproject/metrics/stats/servers/Configuration.java
@@ -3,6 +3,9 @@
package org.torproject.metrics.stats.servers;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
@@ -11,8 +14,6 @@ import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
/**
* Initialize configuration with hard-coded defaults, overwrite with
@@ -21,6 +22,8 @@ import java.util.logging.Logger;
*/
public class Configuration {
+ private static Logger log = LoggerFactory.getLogger(Configuration.class);
+
private boolean importDirectoryArchives = false;
private List<File> directoryArchivesDirectories = new ArrayList<>();
@@ -47,21 +50,16 @@ public class Configuration {
/** Initializes this configuration class. */
public Configuration() {
- /* Initialize logger. */
- Logger logger = Logger.getLogger(Configuration.class.getName());
-
/* Read config file, if present. */
File configFile = new File("config");
if (!configFile.exists()) {
- logger.warning("Could not find config file.");
+ log.warn("Could not find config file.");
return;
}
String line = null;
try (BufferedReader br = new BufferedReader(new FileReader(configFile))) {
while ((line = br.readLine()) != null) {
- if (line.startsWith("#") || line.length() < 1) {
- continue;
- } else if (line.startsWith("ImportDirectoryArchives")) {
+ if (line.startsWith("ImportDirectoryArchives")) {
this.importDirectoryArchives = Integer.parseInt(
line.split(" ")[1]) != 0;
} else if (line.startsWith("DirectoryArchivesDirectory")) {
@@ -90,27 +88,26 @@ public class Configuration {
} else if (line.startsWith("WriteBridgeStats")) {
this.writeBridgeStats = Integer.parseInt(
line.split(" ")[1]) != 0;
- } else {
- logger.severe("Configuration file contains unrecognized "
- + "configuration key in line '" + line + "'! Exiting!");
+ } else if (!line.startsWith("#") && line.length() > 0) {
+ log.error("Configuration file contains unrecognized "
+ + "configuration key in line '{}'! Exiting!", line);
System.exit(1);
}
}
} catch (ArrayIndexOutOfBoundsException e) {
- logger.severe("Configuration file contains configuration key "
- + "without value in line '" + line + "'. Exiting!");
+ log.warn("Configuration file contains configuration key without value in "
+ + "line '{}'. Exiting!", line);
System.exit(1);
} catch (MalformedURLException e) {
- logger.severe("Configuration file contains illegal URL or IP:port "
- + "pair in line '" + line + "'. Exiting!");
+ log.warn("Configuration file contains illegal URL or IP:port pair in "
+ + "line '{}'. Exiting!", line);
System.exit(1);
} catch (NumberFormatException e) {
- logger.severe("Configuration file contains illegal value in line '"
- + line + "' with legal values being 0 or 1. Exiting!");
+ log.warn("Configuration file contains illegal value in line '{}' with "
+ + "legal values being 0 or 1. Exiting!", line);
System.exit(1);
} catch (IOException e) {
- logger.log(Level.SEVERE, "Unknown problem while reading config "
- + "file! Exiting!", e);
+ log.error("Unknown problem while reading config file! Exiting!", e);
System.exit(1);
}
}
diff --git a/src/main/java/org/torproject/metrics/stats/servers/ConsensusStatsFileHandler.java b/src/main/java/org/torproject/metrics/stats/servers/ConsensusStatsFileHandler.java
index 491170e..960069c 100644
--- a/src/main/java/org/torproject/metrics/stats/servers/ConsensusStatsFileHandler.java
+++ b/src/main/java/org/torproject/metrics/stats/servers/ConsensusStatsFileHandler.java
@@ -9,6 +9,9 @@ import org.torproject.descriptor.DescriptorReader;
import org.torproject.descriptor.DescriptorSourceFactory;
import org.torproject.descriptor.NetworkStatusEntry;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
@@ -28,8 +31,6 @@ import java.util.Map;
import java.util.SortedMap;
import java.util.TimeZone;
import java.util.TreeMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
/**
* Generates statistics on the average number of relays and bridges per
@@ -63,10 +64,8 @@ public class ConsensusStatsFileHandler {
*/
private SortedMap<String, String> bridgesPerDay;
- /**
- * Logger for this class.
- */
- private Logger logger;
+ private static Logger log = LoggerFactory.getLogger(
+ ConsensusStatsFileHandler.class);
private int bridgeResultsAdded = 0;
@@ -113,14 +112,10 @@ public class ConsensusStatsFileHandler {
this.dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
this.dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- /* Initialize logger. */
- this.logger = Logger.getLogger(
- ConsensusStatsFileHandler.class.getName());
-
/* Read in number of running bridges per bridge status. */
if (this.bridgeConsensusStatsRawFile.exists()) {
- this.logger.fine("Reading file "
- + this.bridgeConsensusStatsRawFile.getAbsolutePath() + "...");
+ log.debug("Reading file {}...",
+ this.bridgeConsensusStatsRawFile.getAbsolutePath());
try (BufferedReader br = new BufferedReader(new FileReader(
this.bridgeConsensusStatsRawFile))) {
String line;
@@ -131,9 +126,9 @@ public class ConsensusStatsFileHandler {
}
String[] parts = line.split(",");
if (parts.length < 2 || parts.length > 4) {
- this.logger.warning("Corrupt line '" + line + "' in file "
- + this.bridgeConsensusStatsRawFile.getAbsolutePath()
- + "! Aborting to read this file!");
+ log.warn("Corrupt line '{}' in file {}! Aborting to read this "
+ + "file!", line,
+ this.bridgeConsensusStatsRawFile.getAbsolutePath());
break;
}
/* Assume that all lines without authority nickname are based on
@@ -149,12 +144,11 @@ public class ConsensusStatsFileHandler {
} /* No more cases as we already checked the range above. */
this.bridgesRaw.put(key, value);
}
- this.logger.fine("Finished reading file "
- + this.bridgeConsensusStatsRawFile.getAbsolutePath() + ".");
+ log.debug("Finished reading file {}.",
+ this.bridgeConsensusStatsRawFile.getAbsolutePath());
} catch (IOException e) {
- this.logger.log(Level.WARNING, "Failed to read file "
- + this.bridgeConsensusStatsRawFile.getAbsolutePath() + "!",
- e);
+ log.warn("Failed to read file {}!",
+ this.bridgeConsensusStatsRawFile.getAbsolutePath(), e);
}
}
}
@@ -169,14 +163,13 @@ public class ConsensusStatsFileHandler {
+ authorityNickname;
String line = publishedAuthority + "," + running + "," + runningEc2Bridges;
if (!this.bridgesRaw.containsKey(publishedAuthority)) {
- this.logger.finer("Adding new bridge numbers: " + line);
+ log.debug("Adding new bridge numbers: {}", line);
this.bridgesRaw.put(publishedAuthority, line);
this.bridgeResultsAdded++;
} else if (!line.equals(this.bridgesRaw.get(publishedAuthority))) {
- this.logger.warning("The numbers of running bridges we were just "
- + "given (" + line + ") are different from what we learned "
- + "before (" + this.bridgesRaw.get(publishedAuthority) + ")! "
- + "Overwriting!");
+ log.warn("The numbers of running bridges we were just given ({}) are "
+ + "different from what we learned before ({})! Overwriting!", line,
+ this.bridgesRaw.get(publishedAuthority));
this.bridgesRaw.put(publishedAuthority, line);
}
}
@@ -184,7 +177,7 @@ public class ConsensusStatsFileHandler {
/** Imports sanitized bridge descriptors. */
public void importSanitizedBridges() {
if (bridgesDir.exists()) {
- logger.fine("Importing files in directory " + bridgesDir + "/...");
+ log.debug("Importing files in directory {}/...", bridgesDir);
DescriptorReader reader =
DescriptorSourceFactory.createDescriptorReader();
File historyFile = new File(statsDirectory,
@@ -207,8 +200,8 @@ public class ConsensusStatsFileHandler {
authority = "Serge";
}
if (authority == null) {
- this.logger.warning("Did not recognize the bridge authority "
- + "that generated " + descriptorFileName + ". Skipping.");
+ log.warn("Did not recognize the bridge authority that generated "
+ + "{}. Skipping.", descriptorFileName);
continue;
}
this.addBridgeNetworkStatus(
@@ -218,7 +211,7 @@ public class ConsensusStatsFileHandler {
if (keepImportHistory) {
reader.saveHistoryFile(historyFile);
}
- logger.info("Finished importing bridge descriptors.");
+ log.info("Finished importing bridge descriptors.");
}
}
@@ -285,18 +278,18 @@ public class ConsensusStatsFileHandler {
String line = "," + brunning + "," + brunningEc2;
/* Are our results new? */
if (!this.bridgesPerDay.containsKey(date)) {
- this.logger.finer("Adding new average bridge numbers: " + date + line);
+ log.debug("Adding new average bridge numbers: {}{}", date, line);
this.bridgesPerDay.put(date, line);
} else if (!line.equals(this.bridgesPerDay.get(date))) {
- this.logger.finer("Replacing existing average bridge numbers ("
- + this.bridgesPerDay.get(date) + " with new numbers: " + line);
+ log.debug("Replacing existing average bridge numbers ({} with new "
+ + "numbers: {}", this.bridgesPerDay.get(date), line);
this.bridgesPerDay.put(date, line);
}
}
/* Write raw numbers of running bridges to disk. */
- this.logger.fine("Writing file "
- + this.bridgeConsensusStatsRawFile.getAbsolutePath() + "...");
+ log.debug("Writing file {}...",
+ this.bridgeConsensusStatsRawFile.getAbsolutePath());
this.bridgeConsensusStatsRawFile.getParentFile().mkdirs();
try (BufferedWriter bw = new BufferedWriter(
new FileWriter(this.bridgeConsensusStatsRawFile))) {
@@ -306,12 +299,11 @@ public class ConsensusStatsFileHandler {
bw.append(line);
bw.newLine();
}
- this.logger.fine("Finished writing file "
- + this.bridgeConsensusStatsRawFile.getAbsolutePath() + ".");
+ log.debug("Finished writing file {}.",
+ this.bridgeConsensusStatsRawFile.getAbsolutePath());
} catch (IOException e) {
- this.logger.log(Level.WARNING, "Failed to write file "
- + this.bridgeConsensusStatsRawFile.getAbsolutePath() + "!",
- e);
+ log.warn("Failed to write file {}!",
+ this.bridgeConsensusStatsRawFile.getAbsolutePath(), e);
}
/* Add average number of bridges per day to the database. */
@@ -372,8 +364,7 @@ public class ConsensusStatsFileHandler {
conn.commit();
conn.close();
} catch (SQLException e) {
- logger.log(Level.WARNING, "Failed to add average bridge numbers "
- + "to database.", e);
+ log.warn("Failed to add average bridge numbers to database.", e);
}
}
@@ -394,14 +385,14 @@ public class ConsensusStatsFileHandler {
try {
if (now - 6L * 60L * 60L * 1000L > dateTimeFormat.parse(
this.bridgesRaw.lastKey()).getTime()) {
- logger.warning("Last known bridge status is more than 6 hours "
- + "old: " + this.bridgesRaw.lastKey());
+ log.warn("Last known bridge status is more than 6 hours old: {}",
+ this.bridgesRaw.lastKey());
}
} catch (ParseException e) {
- logger.warning("Can't parse the timestamp? Reason: " + e);
+ log.warn("Can't parse the timestamp? Reason: {}", e);
}
}
- logger.info(dumpStats.toString());
+ log.info(dumpStats.toString());
}
}
diff --git a/src/main/java/org/torproject/metrics/stats/servers/LockFile.java b/src/main/java/org/torproject/metrics/stats/servers/LockFile.java
index c6c63bb..c6063d1 100644
--- a/src/main/java/org/torproject/metrics/stats/servers/LockFile.java
+++ b/src/main/java/org/torproject/metrics/stats/servers/LockFile.java
@@ -3,29 +3,31 @@
package org.torproject.metrics.stats.servers;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
-import java.util.logging.Logger;
public class LockFile {
private File lockFile;
- private Logger logger;
+
+ private static Logger log = LoggerFactory.getLogger(LockFile.class);
public LockFile() {
this.lockFile = new File("lock");
- this.logger = Logger.getLogger(LockFile.class.getName());
}
/** Acquires the lock by checking whether a lock file already exists,
* and if not, by creating one with the current system time as
* content. */
public boolean acquireLock() {
- this.logger.fine("Trying to acquire lock...");
+ log.debug("Trying to acquire lock...");
try {
if (this.lockFile.exists()) {
BufferedReader br = new BufferedReader(new FileReader("lock"));
@@ -40,10 +42,10 @@ public class LockFile {
bw.append("").append(String.valueOf(System.currentTimeMillis()))
.append("\n");
bw.close();
- this.logger.fine("Acquired lock.");
+ log.debug("Acquired lock.");
return true;
} catch (IOException e) {
- this.logger.warning("Caught exception while trying to acquire "
+ log.warn("Caught exception while trying to acquire "
+ "lock!");
return false;
}
@@ -51,9 +53,9 @@ public class LockFile {
/** Releases the lock by deleting the lock file, if present. */
public void releaseLock() {
- this.logger.fine("Releasing lock...");
+ log.debug("Releasing lock...");
this.lockFile.delete();
- this.logger.fine("Released lock.");
+ log.debug("Released lock.");
}
}
diff --git a/src/main/java/org/torproject/metrics/stats/servers/LoggingConfiguration.java b/src/main/java/org/torproject/metrics/stats/servers/LoggingConfiguration.java
deleted file mode 100644
index a11c6c4..0000000
--- a/src/main/java/org/torproject/metrics/stats/servers/LoggingConfiguration.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/* Copyright 2011--2018 The Tor Project
- * See LICENSE for licensing information */
-
-package org.torproject.metrics.stats.servers;
-
-import java.io.IOException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.TimeZone;
-import java.util.logging.ConsoleHandler;
-import java.util.logging.FileHandler;
-import java.util.logging.Formatter;
-import java.util.logging.Handler;
-import java.util.logging.Level;
-import java.util.logging.LogRecord;
-import java.util.logging.Logger;
-
-/**
- * Initialize logging configuration.
- *
- * <p>Log levels used by ERNIE:</p>
- *
- * <ul>
- * <li>SEVERE: An event made it impossible to continue program execution.
- * WARNING: A potential problem occurred that requires the operator to
- * look after the otherwise unattended setup</li>
- * <li>INFO: Messages on INFO level are meant to help the operator in
- * making sure that operation works as expected.</li>
- * <li>FINE: Debug messages that are used to identify problems and which
- * are turned on by default.</li>
- * <li>FINER: More detailed debug messages to investigate problems in more
- * detail. Not turned on by default. Increase log file limit when
- * using FINER.</li>
- * <li>FINEST: Most detailed debug messages. Not used.</li>
- * </ul>
- */
-public class LoggingConfiguration {
-
- /** Initializes the logging configuration. */
- public LoggingConfiguration() {
-
- /* Remove default console handler. */
- for (Handler h : Logger.getLogger("").getHandlers()) {
- Logger.getLogger("").removeHandler(h);
- }
-
- /* Disable logging of internal Sun classes. */
- Logger.getLogger("sun").setLevel(Level.OFF);
-
- /* Set minimum log level we care about from INFO to FINER. */
- Logger.getLogger("").setLevel(Level.FINER);
-
- /* Create log handler that writes messages on WARNING or higher to the
- * console. */
- final SimpleDateFormat dateTimeFormat =
- new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- Formatter cf = new Formatter() {
- public String format(LogRecord record) {
- return dateTimeFormat.format(new Date(record.getMillis())) + " "
- + record.getMessage() + "\n";
- }
- };
- Handler ch = new ConsoleHandler();
- ch.setFormatter(cf);
- ch.setLevel(Level.WARNING);
- Logger.getLogger("").addHandler(ch);
-
- /* Initialize own logger for this class. */
- Logger logger = Logger.getLogger(
- LoggingConfiguration.class.getName());
-
- /* Create log handler that writes all messages on FINE or higher to a
- * local file. */
- Formatter ff = new Formatter() {
- public String format(LogRecord record) {
- return dateTimeFormat.format(new Date(record.getMillis())) + " "
- + record.getLevel() + " " + record.getSourceClassName() + " "
- + record.getSourceMethodName() + " " + record.getMessage()
- + (record.getThrown() != null ? " " + record.getThrown() : "")
- + "\n";
- }
- };
- try {
- FileHandler fh = new FileHandler("log", 5000000, 5, true);
- fh.setFormatter(ff);
- fh.setLevel(Level.FINE);
- Logger.getLogger("").addHandler(fh);
- } catch (SecurityException e) {
- logger.log(Level.WARNING, "No permission to create log file. "
- + "Logging to file is disabled.", e);
- } catch (IOException e) {
- logger.log(Level.WARNING, "Could not write to log file. Logging to "
- + "file is disabled.", e);
- }
- }
-}
-
diff --git a/src/main/java/org/torproject/metrics/stats/servers/Main.java b/src/main/java/org/torproject/metrics/stats/servers/Main.java
index 5db867d..080b6e4 100644
--- a/src/main/java/org/torproject/metrics/stats/servers/Main.java
+++ b/src/main/java/org/torproject/metrics/stats/servers/Main.java
@@ -3,8 +3,10 @@
package org.torproject.metrics.stats.servers;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import java.io.File;
-import java.util.logging.Logger;
/**
* Coordinate downloading and parsing of descriptors and extraction of
@@ -12,14 +14,12 @@ import java.util.logging.Logger;
*/
public class Main {
+ private static Logger log = LoggerFactory.getLogger(Main.class);
+
/** Executes this data-processing module. */
public static void main(String[] args) {
- /* Initialize logging configuration. */
- new LoggingConfiguration();
-
- Logger logger = Logger.getLogger(Main.class.getName());
- logger.info("Starting ERNIE.");
+ log.info("Starting ERNIE.");
// Initialize configuration
Configuration config = new Configuration();
@@ -27,7 +27,7 @@ public class Main {
// Use lock file to avoid overlapping runs
LockFile lf = new LockFile();
if (!lf.acquireLock()) {
- logger.severe("Warning: ERNIE is already running or has not exited "
+ log.error("Warning: ERNIE is already running or has not exited "
+ "cleanly! Exiting!");
System.exit(1);
}
@@ -74,7 +74,7 @@ public class Main {
// Remove lock file
lf.releaseLock();
- logger.info("Terminating ERNIE.");
+ log.info("Terminating ERNIE.");
}
}
diff --git a/src/main/java/org/torproject/metrics/stats/servers/RelayDescriptorDatabaseImporter.java b/src/main/java/org/torproject/metrics/stats/servers/RelayDescriptorDatabaseImporter.java
index 750e231..c9a6fa7 100644
--- a/src/main/java/org/torproject/metrics/stats/servers/RelayDescriptorDatabaseImporter.java
+++ b/src/main/java/org/torproject/metrics/stats/servers/RelayDescriptorDatabaseImporter.java
@@ -13,6 +13,9 @@ import org.torproject.descriptor.ServerDescriptor;
import org.postgresql.util.PGbytea;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
@@ -37,8 +40,6 @@ import java.util.Set;
import java.util.SortedSet;
import java.util.TimeZone;
import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
/**
* Parse directory data.
@@ -130,10 +131,8 @@ public final class RelayDescriptorDatabaseImporter {
*/
private PreparedStatement psC;
- /**
- * Logger for this class.
- */
- private Logger logger;
+ private static Logger log
+ = LoggerFactory.getLogger(RelayDescriptorDatabaseImporter.class);
/**
* Directory for writing raw import files.
@@ -202,10 +201,6 @@ public final class RelayDescriptorDatabaseImporter {
this.statsDirectory = statsDirectory;
this.keepImportHistory = keepImportHistory;
- /* Initialize logger. */
- this.logger = Logger.getLogger(
- RelayDescriptorDatabaseImporter.class.getName());
-
if (connectionUrl != null) {
try {
/* Connect to database. */
@@ -244,8 +239,7 @@ public final class RelayDescriptorDatabaseImporter {
this.scheduledUpdates = new HashSet<>();
this.importIntoDatabase = true;
} catch (SQLException e) {
- this.logger.log(Level.WARNING, "Could not connect to database or "
- + "prepare statements.", e);
+ log.warn("Could not connect to database or prepare statements.", e);
}
}
@@ -271,7 +265,7 @@ public final class RelayDescriptorDatabaseImporter {
this.dateTimeFormat.format(timestamp).substring(0, 10)
+ " 00:00:00").getTime();
} catch (ParseException e) {
- this.logger.log(Level.WARNING, "Internal parsing error.", e);
+ log.warn("Internal parsing error.", e);
return;
}
if (!this.scheduledUpdates.contains(dateMillis)) {
@@ -341,9 +335,8 @@ public final class RelayDescriptorDatabaseImporter {
insertedStatusEntries.add(fingerprint);
}
} catch (SQLException e) {
- this.logger.log(Level.WARNING, "Could not add network status "
- + "consensus entry. We won't make any further SQL requests "
- + "in this execution.", e);
+ log.warn("Could not add network status consensus entry. We won't make "
+ + "any further SQL requests in this execution.", e);
this.importIntoDatabase = false;
}
}
@@ -387,7 +380,7 @@ public final class RelayDescriptorDatabaseImporter {
this.statusentryOut.write(PGbytea.toPGString(rawDescriptor)
.replaceAll("\\\\", "\\\\\\\\") + "\n");
} catch (IOException e) {
- this.logger.log(Level.WARNING, "Could not write network status "
+ log.warn("Could not write network status "
+ "consensus entry to raw database import file. We won't "
+ "make any further attempts to write raw import files in "
+ "this execution.", e);
@@ -444,7 +437,7 @@ public final class RelayDescriptorDatabaseImporter {
}
}
} catch (SQLException e) {
- this.logger.log(Level.WARNING, "Could not add server "
+ log.warn("Could not add server "
+ "descriptor. We won't make any further SQL requests in "
+ "this execution.", e);
this.importIntoDatabase = false;
@@ -472,7 +465,7 @@ public final class RelayDescriptorDatabaseImporter {
+ (extraInfoDigest != null ? extraInfoDigest : "\\N")
+ "\n");
} catch (IOException e) {
- this.logger.log(Level.WARNING, "Could not write server "
+ log.warn("Could not write server "
+ "descriptor to raw database import file. We won't make "
+ "any further attempts to write raw import files in this "
+ "execution.", e);
@@ -573,7 +566,7 @@ public final class RelayDescriptorDatabaseImporter {
for (String bandwidthHistoryString : bandwidthHistoryStrings) {
String[] parts = bandwidthHistoryString.split(" ");
if (parts.length != 6) {
- this.logger.finer("Bandwidth history line does not have expected "
+ log.debug("Bandwidth history line does not have expected "
+ "number of elements. Ignoring this line.");
continue;
}
@@ -581,14 +574,13 @@ public final class RelayDescriptorDatabaseImporter {
try {
intervalLength = Long.parseLong(parts[3].substring(1));
} catch (NumberFormatException e) {
- this.logger.fine("Bandwidth history line does not have valid "
- + "interval length '" + parts[3] + " " + parts[4] + "'. "
- + "Ignoring this line.");
+ log.debug("Bandwidth history line does not have valid interval length "
+ + "'{} {}'. Ignoring this line.", parts[3], parts[4]);
continue;
}
String[] values = parts[5].split(",");
if (intervalLength % 900L != 0L) {
- this.logger.fine("Bandwidth history line does not contain "
+ log.debug("Bandwidth history line does not contain "
+ "multiples of 15-minute intervals. Ignoring this line.");
continue;
} else if (intervalLength != 900L) {
@@ -606,7 +598,7 @@ public final class RelayDescriptorDatabaseImporter {
values = newValues;
intervalLength = 900L;
} catch (NumberFormatException e) {
- this.logger.fine("Number format exception while parsing "
+ log.debug("Number format exception while parsing "
+ "bandwidth history line. Ignoring this line.");
continue;
}
@@ -620,16 +612,15 @@ public final class RelayDescriptorDatabaseImporter {
dateStart = dateTimeFormat.parse(parts[1] + " 00:00:00")
.getTime();
} catch (ParseException e) {
- this.logger.fine("Parse exception while parsing timestamp in "
+ log.debug("Parse exception while parsing timestamp in "
+ "bandwidth history line. Ignoring this line.");
continue;
}
if (Math.abs(published - intervalEnd)
> 7L * 24L * 60L * 60L * 1000L) {
- this.logger.fine("Extra-info descriptor publication time "
- + dateTimeFormat.format(published) + " and last interval "
- + "time " + intervalEndTime + " in " + type + " line differ "
- + "by more than 7 days! Not adding this line!");
+ log.debug("Extra-info descriptor publication time {} and last interval "
+ + "time {} in {} line differ by more than 7 days! Not adding this "
+ + "line!", dateTimeFormat.format(published), intervalEndTime, type);
continue;
}
long currentIntervalEnd = intervalEnd;
@@ -655,7 +646,7 @@ public final class RelayDescriptorDatabaseImporter {
currentIntervalEnd -= intervalLength * 1000L;
}
} catch (NumberFormatException e) {
- this.logger.fine("Number format exception while parsing "
+ log.debug("Number format exception while parsing "
+ "bandwidth history line. Ignoring this line.");
continue;
}
@@ -703,7 +694,7 @@ public final class RelayDescriptorDatabaseImporter {
this.csH.executeBatch();
}
} catch (SQLException | ParseException e) {
- this.logger.log(Level.WARNING, "Could not insert bandwidth "
+ log.warn("Could not insert bandwidth "
+ "history line into database. We won't make any "
+ "further SQL requests in this execution.", e);
this.importIntoDatabase = false;
@@ -722,7 +713,7 @@ public final class RelayDescriptorDatabaseImporter {
+ dirreadIntArray.toString() + "','"
+ dirwrittenIntArray.toString() + "');\n");
} catch (IOException e) {
- this.logger.log(Level.WARNING, "Could not write bandwidth "
+ log.warn("Could not write bandwidth "
+ "history to raw database import file. We won't make "
+ "any further attempts to write raw import files in "
+ "this execution.", e);
@@ -797,7 +788,7 @@ public final class RelayDescriptorDatabaseImporter {
}
}
} catch (SQLException e) {
- this.logger.log(Level.WARNING, "Could not add network status "
+ log.warn("Could not add network status "
+ "consensus. We won't make any further SQL requests in "
+ "this execution.", e);
this.importIntoDatabase = false;
@@ -815,7 +806,7 @@ public final class RelayDescriptorDatabaseImporter {
String validAfterString = this.dateTimeFormat.format(validAfter);
this.consensusOut.write(validAfterString + "\n");
} catch (IOException e) {
- this.logger.log(Level.WARNING, "Could not write network status "
+ log.warn("Could not write network status "
+ "consensus to raw database import file. We won't make "
+ "any further attempts to write raw import files in this "
+ "execution.", e);
@@ -826,7 +817,7 @@ public final class RelayDescriptorDatabaseImporter {
/** Imports relay descriptors into the database. */
public void importRelayDescriptors() {
- logger.fine("Importing files in directories " + archivesDirectories
+ log.info("Importing files in directories " + archivesDirectories
+ "/...");
if (!this.archivesDirectories.isEmpty()) {
DescriptorReader reader =
@@ -854,7 +845,7 @@ public final class RelayDescriptorDatabaseImporter {
}
}
- logger.info("Finished importing relay descriptors.");
+ log.info("Finished importing relay descriptors.");
}
private void addRelayNetworkStatusConsensus(
@@ -913,12 +904,12 @@ public final class RelayDescriptorDatabaseImporter {
public void closeConnection() {
/* Log stats about imported descriptors. */
- this.logger.info(String.format("Finished importing relay "
- + "descriptors: %d consensuses, %d network status entries, %d "
- + "votes, %d server descriptors, %d extra-info descriptors, %d "
- + "bandwidth history elements, and %d dirreq stats elements",
+ log.info("Finished importing relay "
+ + "descriptors: {} consensuses, {} network status entries, {} "
+ + "votes, {} server descriptors, {} extra-info descriptors, {} "
+ + "bandwidth history elements, and {} dirreq stats elements",
rcsCount, rrsCount, rvsCount, rdsCount, resCount, rhsCount,
- rqsCount));
+ rqsCount);
/* Insert scheduled updates a second time, just in case the refresh
* run has started since inserting them the first time in which case
@@ -933,7 +924,7 @@ public final class RelayDescriptorDatabaseImporter {
this.psU.execute();
}
} catch (SQLException e) {
- this.logger.log(Level.WARNING, "Could not add scheduled dates "
+ log.warn("Could not add scheduled dates "
+ "for the next refresh run.", e);
}
}
@@ -945,14 +936,12 @@ public final class RelayDescriptorDatabaseImporter {
this.conn.commit();
} catch (SQLException e) {
- this.logger.log(Level.WARNING, "Could not commit final records "
- + "to database", e);
+ log.warn("Could not commit final records to database", e);
}
try {
this.conn.close();
} catch (SQLException e) {
- this.logger.log(Level.WARNING, "Could not close database "
- + "connection.", e);
+ log.warn("Could not close database connection.", e);
}
}
@@ -975,8 +964,7 @@ public final class RelayDescriptorDatabaseImporter {
this.consensusOut.close();
}
} catch (IOException e) {
- this.logger.log(Level.WARNING, "Could not close one or more raw "
- + "database import files.", e);
+ log.warn("Could not close one or more raw database import files.", e);
}
}
}