[tor-commits] [onionoo/master] Simplify logging configuration.
commit 7b0e856574067f52d012f744286aaf9903f68cb8
Author: Karsten Loesing <karsten.loesing@xxxxxxx>
Date: Tue Mar 31 12:04:49 2020 +0200
Simplify logging configuration.
Implements #33549.
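    In short, the diff below removes the bundled src/main/resources/logback.xml,
    drops the separate "statistics" logger in StatusUpdateRunner, and switches
    every class to a single static final SLF4J logger consistently named
    "logger". A minimal sketch of that pattern, using Main.class as the example
    (the same change is repeated for each class in the diff):

        // Before: per-instance, non-final field named "log"
        private Logger log = LoggerFactory.getLogger(Main.class);

        // After: one shared, static final field named "logger"
        private static final Logger logger = LoggerFactory.getLogger(Main.class);

    Call sites change accordingly, e.g. this.log.info(...) becomes
    logger.info(...), with no change to the logged messages beyond line
    re-wrapping.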
---
CHANGELOG.md | 3 +
src/build | 2 +-
.../org/torproject/metrics/onionoo/cron/Main.java | 42 +++++------
.../metrics/onionoo/docs/BandwidthStatus.java | 6 +-
.../metrics/onionoo/docs/ClientsHistory.java | 19 ++---
.../metrics/onionoo/docs/ClientsStatus.java | 4 +-
.../metrics/onionoo/docs/DateTimeHelper.java | 6 +-
.../metrics/onionoo/docs/DocumentStore.java | 68 +++++++++---------
.../metrics/onionoo/docs/NodeStatus.java | 20 +++---
.../metrics/onionoo/docs/UpdateStatus.java | 5 +-
.../metrics/onionoo/docs/UptimeHistory.java | 14 ++--
.../metrics/onionoo/docs/UptimeStatus.java | 4 +-
.../metrics/onionoo/docs/WeightsStatus.java | 16 ++---
.../metrics/onionoo/server/NodeIndexer.java | 6 +-
.../metrics/onionoo/server/PerformanceMetrics.java | 29 ++++----
.../metrics/onionoo/server/ServerMain.java | 6 +-
.../metrics/onionoo/updater/DescriptorQueue.java | 19 ++---
.../metrics/onionoo/updater/DescriptorSource.java | 32 +++++----
.../metrics/onionoo/updater/LookupService.java | 34 ++++-----
.../onionoo/updater/NodeDetailsStatusUpdater.java | 21 +++---
.../onionoo/updater/StatusUpdateRunner.java | 15 ++--
.../metrics/onionoo/util/FormattingUtils.java | 4 +-
.../onionoo/writer/BandwidthDocumentWriter.java | 4 +-
.../onionoo/writer/ClientsDocumentWriter.java | 4 +-
.../onionoo/writer/DetailsDocumentWriter.java | 4 +-
.../onionoo/writer/DocumentWriterRunner.java | 6 +-
.../onionoo/writer/SummaryDocumentWriter.java | 4 +-
.../onionoo/writer/UptimeDocumentWriter.java | 4 +-
.../onionoo/writer/WeightsDocumentWriter.java | 4 +-
src/main/resources/logback.xml | 82 ----------------------
30 files changed, 209 insertions(+), 278 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d5877d3..f8666fd 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,8 @@
# Changes in version 8.0-1.2?.? - 2020-0?-??
+ * Minor changes
+ - Simplify logging configuration.
+
# Changes in version 8.0-1.25.0 - 2020-02-20
diff --git a/src/build b/src/build
index 264e498..fd85646 160000
--- a/src/build
+++ b/src/build
@@ -1 +1 @@
-Subproject commit 264e498f54a20f7d299daaf2533d043f880e6a8b
+Subproject commit fd856466bcb260f53ef69a24c102d0e49d171cc3
diff --git a/src/main/java/org/torproject/metrics/onionoo/cron/Main.java b/src/main/java/org/torproject/metrics/onionoo/cron/Main.java
index e79edf5..9f3c3c6 100644
--- a/src/main/java/org/torproject/metrics/onionoo/cron/Main.java
+++ b/src/main/java/org/torproject/metrics/onionoo/cron/Main.java
@@ -24,7 +24,7 @@ public class Main implements Runnable {
private Main() {}
- private Logger log = LoggerFactory.getLogger(Main.class);
+ private static final Logger logger = LoggerFactory.getLogger(Main.class);
/** Executes a single update run or partial update run, or initiates
* hourly executions, depending on the given command-line arguments. */
@@ -98,7 +98,7 @@ public class Main implements Runnable {
private void runOrScheduleExecutions() {
if (!this.defaultMode) {
- this.log.info("Going to run one-time updater ... ");
+ logger.info("Going to run one-time updater ... ");
this.run();
} else {
this.scheduleExecutions();
@@ -109,13 +109,13 @@ public class Main implements Runnable {
Executors.newScheduledThreadPool(1);
private void scheduleExecutions() {
- this.log.info("Periodic updater started.");
+ logger.info("Periodic updater started.");
final Runnable mainRunnable = this;
int currentMinute = Calendar.getInstance().get(Calendar.MINUTE);
int initialDelay = (75 - currentMinute + currentMinute % 5) % 60;
/* Run after initialDelay delay and then every hour. */
- this.log.info("Periodic updater will start every hour at minute {}.",
+ logger.info("Periodic updater will start every hour at minute {}.",
(currentMinute + initialDelay) % 60);
this.scheduler.scheduleAtFixedRate(mainRunnable, initialDelay, 60,
TimeUnit.MINUTES);
@@ -143,23 +143,23 @@ public class Main implements Runnable {
private DocumentWriterRunner dwr;
private void initialize() {
- this.log.debug("Started update ...");
+ logger.debug("Started update ...");
if (!this.writeOnly) {
this.dso = DescriptorSourceFactory.getDescriptorSource();
- this.log.info("Initialized descriptor source");
+ logger.info("Initialized descriptor source");
}
if (!this.downloadOnly) {
this.ds = DocumentStoreFactory.getDocumentStore();
- this.log.info("Initialized document store");
+ logger.info("Initialized document store");
}
if (!this.downloadOnly && !this.writeOnly) {
this.sur = new StatusUpdateRunner();
- this.log.info("Initialized status update runner");
+ logger.info("Initialized status update runner");
}
if (!this.downloadOnly && !this.updateOnly) {
this.ds.setOutDir(outDir);
this.dwr = new DocumentWriterRunner();
- this.log.info("Initialized document writer runner");
+ logger.info("Initialized document writer runner");
}
}
@@ -167,7 +167,7 @@ public class Main implements Runnable {
if (this.updateOnly || this.writeOnly) {
return;
}
- this.log.info("Downloading descriptors.");
+ logger.info("Downloading descriptors.");
this.dso.downloadDescriptors();
}
@@ -175,9 +175,9 @@ public class Main implements Runnable {
if (this.downloadOnly || this.writeOnly) {
return;
}
- this.log.info("Reading descriptors.");
+ logger.info("Reading descriptors.");
this.dso.readDescriptors();
- this.log.info("Updating internal status files.");
+ logger.info("Updating internal status files.");
this.sur.updateStatuses();
}
@@ -185,24 +185,24 @@ public class Main implements Runnable {
if (this.downloadOnly || this.updateOnly) {
return;
}
- log.info("Updating document files.");
+ logger.info("Updating document files.");
this.dwr.writeDocuments();
}
private void shutDown() {
- log.info("Shutting down.");
+ logger.info("Shutting down.");
if (this.dso != null) {
this.dso.writeHistoryFiles();
- log.info("Wrote parse histories");
+ logger.info("Wrote parse histories");
}
if (this.ds != null) {
this.ds.flushDocumentCache();
- this.log.info("Flushed document cache");
+ logger.info("Flushed document cache");
}
}
private void gatherStatistics() {
- this.log.info("Gathering statistics.");
+ logger.info("Gathering statistics.");
if (this.sur != null) {
this.sur.logStatistics();
}
@@ -210,23 +210,23 @@ public class Main implements Runnable {
this.dwr.logStatistics();
}
if (this.dso != null) {
- this.log.info("Descriptor source\n{}", this.dso.getStatsString());
+ logger.info("Descriptor source\n{}", this.dso.getStatsString());
}
if (this.ds != null) {
- this.log.info("Document store\n{}", this.ds.getStatsString());
+ logger.info("Document store\n{}", this.ds.getStatsString());
}
}
private void cleanUp() {
/* Clean up to prevent out-of-memory exception, and to ensure that the
* next execution starts with a fresh descriptor source. */
- this.log.info("Cleaning up.");
+ logger.info("Cleaning up.");
if (this.ds != null) {
this.ds.invalidateDocumentCache();
}
DocumentStoreFactory.setDocumentStore(null);
DescriptorSourceFactory.setDescriptorSource(null);
- this.log.info("Done.");
+ logger.info("Done.");
}
}
diff --git a/src/main/java/org/torproject/metrics/onionoo/docs/BandwidthStatus.java b/src/main/java/org/torproject/metrics/onionoo/docs/BandwidthStatus.java
index a3ceb69..2a68de6 100644
--- a/src/main/java/org/torproject/metrics/onionoo/docs/BandwidthStatus.java
+++ b/src/main/java/org/torproject/metrics/onionoo/docs/BandwidthStatus.java
@@ -15,7 +15,7 @@ import java.util.TreeMap;
public class BandwidthStatus extends Document {
- private static Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
BandwidthStatus.class);
private transient boolean isDirty = false;
@@ -55,7 +55,7 @@ public class BandwidthStatus extends Document {
String line = s.nextLine();
String[] parts = line.split(" ");
if (parts.length != 6) {
- log.error("Illegal line '{}' in bandwidth history. Skipping this "
+ logger.error("Illegal line '{}' in bandwidth history. Skipping this "
+ "line.", line);
continue;
}
@@ -64,7 +64,7 @@ public class BandwidthStatus extends Document {
long startMillis = DateTimeHelper.parse(parts[1] + " " + parts[2]);
long endMillis = DateTimeHelper.parse(parts[3] + " " + parts[4]);
if (startMillis < 0L || endMillis < 0L) {
- log.error("Could not parse timestamp while reading "
+ logger.error("Could not parse timestamp while reading "
+ "bandwidth history. Skipping.");
break;
}
diff --git a/src/main/java/org/torproject/metrics/onionoo/docs/ClientsHistory.java b/src/main/java/org/torproject/metrics/onionoo/docs/ClientsHistory.java
index bab618e..89cc135 100644
--- a/src/main/java/org/torproject/metrics/onionoo/docs/ClientsHistory.java
+++ b/src/main/java/org/torproject/metrics/onionoo/docs/ClientsHistory.java
@@ -12,7 +12,7 @@ import java.util.TreeMap;
public class ClientsHistory implements Comparable<ClientsHistory> {
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
ClientsHistory.class);
private long startMillis;
@@ -73,27 +73,27 @@ public class ClientsHistory implements Comparable<ClientsHistory> {
String responseHistoryString) {
String[] parts = responseHistoryString.split(" ", 8);
if (parts.length != 8) {
- log.warn("Invalid number of space-separated strings in clients history: "
- + "'{}'. Skipping", responseHistoryString);
+ logger.warn("Invalid number of space-separated strings in clients "
+ + "history: '{}'. Skipping", responseHistoryString);
return null;
}
long startMillis = DateTimeHelper.parse(parts[0] + " " + parts[1]);
long endMillis = DateTimeHelper.parse(parts[2] + " " + parts[3]);
if (startMillis < 0L || endMillis < 0L) {
- log.warn("Invalid start or end timestamp in clients history: '{}'. "
+ logger.warn("Invalid start or end timestamp in clients history: '{}'. "
+ "Skipping.", responseHistoryString);
return null;
}
if (startMillis >= endMillis) {
- log.warn("Start timestamp must be smaller than end timestamp in clients "
- + "history: '{}'. Skipping.", responseHistoryString);
+ logger.warn("Start timestamp must be smaller than end timestamp in "
+ + "clients history: '{}'. Skipping.", responseHistoryString);
return null;
}
double totalResponses;
try {
totalResponses = Double.parseDouble(parts[4]);
} catch (NumberFormatException e) {
- log.warn("Invalid response number format in clients history: '{}'. "
+ logger.warn("Invalid response number format in clients history: '{}'. "
+ "Skipping.", responseHistoryString);
return null;
}
@@ -105,8 +105,9 @@ public class ClientsHistory implements Comparable<ClientsHistory> {
parseResponses(parts[7]);
if (responsesByCountry == null || responsesByTransport == null
|| responsesByVersion == null) {
- log.warn("Invalid format of responses by country, transport, or version "
- + "in clients history: '{}'. Skipping.", responseHistoryString);
+ logger.warn("Invalid format of responses by country, transport, or "
+ + "version in clients history: '{}'. Skipping.",
+ responseHistoryString);
return null;
}
return new ClientsHistory(startMillis, endMillis, totalResponses,
diff --git a/src/main/java/org/torproject/metrics/onionoo/docs/ClientsStatus.java b/src/main/java/org/torproject/metrics/onionoo/docs/ClientsStatus.java
index 09899b6..19d2e7f 100644
--- a/src/main/java/org/torproject/metrics/onionoo/docs/ClientsStatus.java
+++ b/src/main/java/org/torproject/metrics/onionoo/docs/ClientsStatus.java
@@ -12,7 +12,7 @@ import java.util.TreeSet;
public class ClientsStatus extends Document {
- private static Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
ClientsStatus.class);
private transient boolean isDirty = false;
@@ -44,7 +44,7 @@ public class ClientsStatus extends Document {
if (parsedLine != null) {
this.history.add(parsedLine);
} else {
- log.error("Could not parse clients history line '{}'. Skipping.",
+ logger.error("Could not parse clients history line '{}'. Skipping.",
line);
}
}
diff --git a/src/main/java/org/torproject/metrics/onionoo/docs/DateTimeHelper.java b/src/main/java/org/torproject/metrics/onionoo/docs/DateTimeHelper.java
index e49b48a..b0bda36 100644
--- a/src/main/java/org/torproject/metrics/onionoo/docs/DateTimeHelper.java
+++ b/src/main/java/org/torproject/metrics/onionoo/docs/DateTimeHelper.java
@@ -17,7 +17,7 @@ public class DateTimeHelper {
public static final long NO_TIME_AVAILABLE = -1L;
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
DateTimeHelper.class);
private DateTimeHelper() {
@@ -99,13 +99,13 @@ public class DateTimeHelper {
* string cannot be parsed. */
public static long parse(String string, String format) {
if (null == string) {
- log.warn("Date String was null.");
+ logger.warn("Date String was null.");
return NO_TIME_AVAILABLE;
}
try {
return getDateFormat(format).parse(string).getTime();
} catch (ParseException e) {
- log.warn(e.getMessage(), e);
+ logger.warn(e.getMessage(), e);
return NO_TIME_AVAILABLE;
}
}
diff --git a/src/main/java/org/torproject/metrics/onionoo/docs/DocumentStore.java b/src/main/java/org/torproject/metrics/onionoo/docs/DocumentStore.java
index 4ad6709..e74094a 100644
--- a/src/main/java/org/torproject/metrics/onionoo/docs/DocumentStore.java
+++ b/src/main/java/org/torproject/metrics/onionoo/docs/DocumentStore.java
@@ -41,7 +41,7 @@ import java.util.TreeSet;
// TODO Also look into simple key-value stores instead of real databases.
public class DocumentStore {
- private static Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
DocumentStore.class);
private static ObjectMapper objectMapper = new ObjectMapper();
@@ -143,8 +143,8 @@ public class DocumentStore {
this.listedFiles += parsedNodeStatuses.size();
this.listOperations++;
} catch (IOException e) {
- log.error("Could not read file '{}'.", summaryFile.getAbsolutePath(),
- e);
+ logger.error("Could not read file '{}'.",
+ summaryFile.getAbsolutePath(), e);
}
}
}
@@ -186,7 +186,7 @@ public class DocumentStore {
this.listedFiles += parsedSummaryDocuments.size();
this.listOperations++;
} catch (IOException e) {
- log.error("Could not parse summary document '{}' from file '{}'.",
+ logger.error("Could not parse summary document '{}' from file '{}'.",
line, summaryFile.getAbsolutePath(), e);
}
}
@@ -311,7 +311,7 @@ public class DocumentStore {
try {
documentString = objectMapper.writeValueAsString(document);
} catch (JsonProcessingException e) {
- log.error("Serializing failed for type {}.",
+ logger.error("Serializing failed for type {}.",
document.getClass().getName(), e);
return false;
}
@@ -328,7 +328,7 @@ public class DocumentStore {
documentString = FormattingUtils.replaceValidUtf(
objectMapper.writeValueAsString(document));
} catch (JsonProcessingException e) {
- log.error("Serializing failed for type {}.",
+ logger.error("Serializing failed for type {}.",
document.getClass().getName(), e);
return false;
}
@@ -347,13 +347,13 @@ public class DocumentStore {
|| document instanceof UpdateStatus) {
documentString = document.toDocumentString();
} else {
- log.error("Serializing is not supported for type {}.",
+ logger.error("Serializing is not supported for type {}.",
document.getClass().getName());
return false;
}
try {
if (documentString.length() > ONE_MIBIBYTE) {
- log.warn("Attempting to store very large document file: path='{}', "
+ logger.warn("Attempting to store very large document file: path='{}', "
+ "bytes={}", documentFile.getAbsolutePath(),
documentString.length());
}
@@ -377,7 +377,7 @@ public class DocumentStore {
this.storedFiles++;
this.storedBytes += documentString.length();
} catch (IOException e) {
- log.error("Could not write file '{}'.", documentFile.getAbsolutePath(),
+ logger.error("Could not write file '{}'.", documentFile.getAbsolutePath(),
e);
return false;
}
@@ -438,10 +438,10 @@ public class DocumentStore {
String contact = null;
for (String orAddressAndPort : detailsDocument.getOrAddresses()) {
if (!orAddressAndPort.contains(":")) {
- log.warn("Attempt to create summary document from details document for "
- + "fingerprint {} failed because of invalid OR address/port: '{}'. "
- + "Not returning a summary document in this case.", fingerprint,
- orAddressAndPort);
+ logger.warn("Attempt to create summary document from details document "
+ + "for fingerprint {} failed because of invalid OR address/port: "
+ + "'{}'. Not returning a summary document in this case.",
+ fingerprint, orAddressAndPort);
return null;
}
String orAddress = orAddressAndPort.substring(0,
@@ -482,7 +482,7 @@ public class DocumentStore {
/* Document file does not exist. That's okay. */
return null;
} else if (documentFile.isDirectory()) {
- log.error("Could not read file '{}', because it is a directory.",
+ logger.error("Could not read file '{}', because it is a directory.",
documentFile.getAbsolutePath());
return null;
}
@@ -504,11 +504,12 @@ public class DocumentStore {
this.retrievedFiles++;
this.retrievedBytes += documentString.length();
} catch (IOException e) {
- log.error("Could not read file '{}'.", documentFile.getAbsolutePath(), e);
+ logger.error("Could not read file '{}'.", documentFile.getAbsolutePath(),
+ e);
return null;
}
if (documentString.length() > ONE_MIBIBYTE) {
- log.warn("Retrieved very large document file: path='{}', bytes={}",
+ logger.warn("Retrieved very large document file: path='{}', bytes={}",
documentFile.getAbsolutePath(), documentString.length());
}
T result = null;
@@ -532,7 +533,7 @@ public class DocumentStore {
return this.retrieveParsedDocumentFile(documentType, "{"
+ documentString + "}");
} else {
- log.error("Parsing is not supported for type {}.",
+ logger.error("Parsing is not supported for type {}.",
documentType.getName());
}
return result;
@@ -546,10 +547,10 @@ public class DocumentStore {
result.setFromDocumentString(documentString);
} catch (ReflectiveOperationException e) {
/* Handle below. */
- log.error(e.getMessage(), e);
+ logger.error(e.getMessage(), e);
}
if (result == null) {
- log.error("Could not initialize parsed status file of type {}.",
+ logger.error("Could not initialize parsed status file of type {}.",
documentType.getName());
}
return result;
@@ -562,11 +563,11 @@ public class DocumentStore {
result = objectMapper.readValue(documentString, documentType);
} catch (Throwable e) {
/* Handle below. */
- log.error(documentString);
- log.error(e.getMessage(), e);
+ logger.error(documentString);
+ logger.error(e.getMessage(), e);
}
if (result == null) {
- log.error("Could not initialize parsed document of type {}.",
+ logger.error("Could not initialize parsed document of type {}.",
documentType.getName());
}
return result;
@@ -580,10 +581,10 @@ public class DocumentStore {
result.setDocumentString(documentString);
} catch (ReflectiveOperationException e) {
/* Handle below. */
- log.error(e.getMessage(), e);
+ logger.error(e.getMessage(), e);
}
if (result == null) {
- log.error("Could not initialize unparsed document of type {}.",
+ logger.error("Could not initialize unparsed document of type {}.",
documentType.getName());
}
return result;
@@ -626,7 +627,8 @@ public class DocumentStore {
Class<T> documentType, String fingerprint) {
File documentFile = this.getDocumentFile(documentType, fingerprint);
if (documentFile == null || !documentFile.delete()) {
- log.error("Could not delete file '{}'.", documentFile.getAbsolutePath());
+ logger.error("Could not delete file '{}'.",
+ documentFile.getAbsolutePath());
return false;
}
this.removedFiles++;
@@ -638,7 +640,7 @@ public class DocumentStore {
File documentFile = null;
if (fingerprint == null && !documentType.equals(UpdateStatus.class)
&& !documentType.equals(UptimeStatus.class)) {
- log.warn("Attempted to locate a document file of type {} without "
+ logger.warn("Attempted to locate a document file of type {} without "
+ "providing a fingerprint. Such a file does not exist.",
documentType.getName());
return null;
@@ -732,7 +734,7 @@ public class DocumentStore {
private void writeNodeStatuses() {
File directory = this.statusDir;
if (directory == null) {
- log.error("Unable to write node statuses without knowing the "
+ logger.error("Unable to write node statuses without knowing the "
+ "'status' directory to write to!");
return;
}
@@ -753,7 +755,7 @@ public class DocumentStore {
if (line != null) {
sb.append(line).append("\n");
} else {
- log.error("Could not serialize relay node status '{}'",
+ logger.error("Could not serialize relay node status '{}'",
relay.getFingerprint());
}
}
@@ -762,7 +764,7 @@ public class DocumentStore {
if (line != null) {
sb.append(line).append("\n");
} else {
- log.error("Could not serialize bridge node status '{}'",
+ logger.error("Could not serialize bridge node status '{}'",
bridge.getFingerprint());
}
}
@@ -775,7 +777,8 @@ public class DocumentStore {
this.storedFiles++;
this.storedBytes += documentString.length();
} catch (IOException e) {
- log.error("Could not write file '{}'.", summaryFile.getAbsolutePath(), e);
+ logger.error("Could not write file '{}'.", summaryFile.getAbsolutePath(),
+ e);
}
}
@@ -804,7 +807,7 @@ public class DocumentStore {
if (line != null) {
sb.append(line).append("\n");
} else {
- log.error("Could not serialize relay summary document '{}'",
+ logger.error("Could not serialize relay summary document '{}'",
summaryDocument.getFingerprint());
}
}
@@ -818,7 +821,8 @@ public class DocumentStore {
this.storedFiles++;
this.storedBytes += documentString.length();
} catch (IOException e) {
- log.error("Could not write file '{}'.", summaryFile.getAbsolutePath(), e);
+ logger.error("Could not write file '{}'.", summaryFile.getAbsolutePath(),
+ e);
}
}
diff --git a/src/main/java/org/torproject/metrics/onionoo/docs/NodeStatus.java b/src/main/java/org/torproject/metrics/onionoo/docs/NodeStatus.java
index 53cd9ec..e343045 100644
--- a/src/main/java/org/torproject/metrics/onionoo/docs/NodeStatus.java
+++ b/src/main/java/org/torproject/metrics/onionoo/docs/NodeStatus.java
@@ -77,7 +77,7 @@ import java.util.stream.Collectors;
*/
public class NodeStatus extends Document {
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
NodeStatus.class);
/* From most recently published server descriptor: */
@@ -550,7 +550,7 @@ public class NodeStatus extends Document {
try {
String[] parts = documentString.trim().split("\t");
if (parts.length < 23) {
- log.error("Too few space-separated values in line '{}'. Skipping.",
+ logger.error("Too few space-separated values in line '{}'. Skipping.",
documentString.trim());
return null;
}
@@ -565,7 +565,7 @@ public class NodeStatus extends Document {
if (addresses.contains(";")) {
String[] addressParts = addresses.split(";", -1);
if (addressParts.length != 3) {
- log.error("Invalid addresses entry in line '{}'. Skipping.",
+ logger.error("Invalid addresses entry in line '{}'. Skipping.",
documentString.trim());
return null;
}
@@ -587,11 +587,11 @@ public class NodeStatus extends Document {
long lastSeenMillis = DateTimeHelper.parse(parts[4] + " "
+ parts[5]);
if (lastSeenMillis < 0L) {
- log.error("Parse exception while parsing node status line '{}'. "
+ logger.error("Parse exception while parsing node status line '{}'. "
+ "Skipping.", documentString);
return null;
} else if (lastSeenMillis == 0L) {
- log.debug("Skipping node status with fingerprint {} that has so far "
+ logger.debug("Skipping node status with fingerprint {} that has so far "
+ "never been seen in a network status.", fingerprint);
return null;
}
@@ -614,7 +614,7 @@ public class NodeStatus extends Document {
}
long firstSeenMillis = DateTimeHelper.parse(parts[15] + " " + parts[16]);
if (firstSeenMillis < 0L) {
- log.error("Parse exception while parsing node status line '{}'. "
+ logger.error("Parse exception while parsing node status line '{}'. "
+ "Skipping.", documentString);
return null;
}
@@ -624,7 +624,7 @@ public class NodeStatus extends Document {
lastChangedAddresses = DateTimeHelper.parse(parts[17] + " "
+ parts[18]);
if (lastChangedAddresses < 0L) {
- log.error("Parse exception while parsing node status line '{}'. "
+ logger.error("Parse exception while parsing node status line '{}'. "
+ "Skipping.", documentString);
return null;
}
@@ -690,13 +690,13 @@ public class NodeStatus extends Document {
}
return nodeStatus;
} catch (NumberFormatException e) {
- log.error("Number format exception while parsing node status line '{}'. "
- + "Skipping.", documentString, e);
+ logger.error("Number format exception while parsing node status line "
+ + "'{}'. Skipping.", documentString, e);
return null;
} catch (Exception e) {
/* This catch block is only here to handle yet unknown errors. It
* should go away once we're sure what kind of errors can occur. */
- log.error("Unknown exception while parsing node status line '{}'. "
+ logger.error("Unknown exception while parsing node status line '{}'. "
+ "Skipping.", documentString, e);
return null;
}
diff --git a/src/main/java/org/torproject/metrics/onionoo/docs/UpdateStatus.java b/src/main/java/org/torproject/metrics/onionoo/docs/UpdateStatus.java
index 10b6123..a840585 100644
--- a/src/main/java/org/torproject/metrics/onionoo/docs/UpdateStatus.java
+++ b/src/main/java/org/torproject/metrics/onionoo/docs/UpdateStatus.java
@@ -8,7 +8,8 @@ import org.slf4j.LoggerFactory;
public class UpdateStatus extends Document {
- private static Logger log = LoggerFactory.getLogger(UpdateStatus.class);
+ private static final Logger logger = LoggerFactory.getLogger(
+ UpdateStatus.class);
private long updatedMillis;
@@ -25,7 +26,7 @@ public class UpdateStatus extends Document {
try {
this.updatedMillis = Long.parseLong(documentString.trim());
} catch (NumberFormatException e) {
- log.error("Could not parse timestamp '{}'. Setting to 1970-01-01 "
+ logger.error("Could not parse timestamp '{}'. Setting to 1970-01-01 "
+ "00:00:00.", documentString);
this.updatedMillis = 0L;
}
diff --git a/src/main/java/org/torproject/metrics/onionoo/docs/UptimeHistory.java b/src/main/java/org/torproject/metrics/onionoo/docs/UptimeHistory.java
index 07145e4..595a165 100644
--- a/src/main/java/org/torproject/metrics/onionoo/docs/UptimeHistory.java
+++ b/src/main/java/org/torproject/metrics/onionoo/docs/UptimeHistory.java
@@ -12,7 +12,7 @@ import java.util.TreeSet;
public class UptimeHistory implements Comparable<UptimeHistory> {
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
UptimeHistory.class);
private boolean relay;
@@ -58,22 +58,22 @@ public class UptimeHistory implements Comparable<UptimeHistory> {
public static UptimeHistory fromString(String uptimeHistoryString) {
String[] parts = uptimeHistoryString.split(" ", -1);
if (parts.length < 3) {
- log.warn("Invalid number of space-separated strings in uptime history: "
- + "'{}'. Skipping", uptimeHistoryString);
+ logger.warn("Invalid number of space-separated strings in uptime "
+ + "history: '{}'. Skipping", uptimeHistoryString);
return null;
}
boolean relay = false;
if (parts[0].equalsIgnoreCase("r")) {
relay = true;
} else if (!parts[0].equals("b")) {
- log.warn("Invalid node type in uptime history: '{}'. Supported types are "
- + "'r', 'R', and 'b'. Skipping.", uptimeHistoryString);
+ logger.warn("Invalid node type in uptime history: '{}'. Supported types "
+ + "are 'r', 'R', and 'b'. Skipping.", uptimeHistoryString);
return null;
}
long startMillis = DateTimeHelper.parse(parts[1],
DateTimeHelper.DATEHOUR_NOSPACE_FORMAT);
if (DateTimeHelper.NO_TIME_AVAILABLE == startMillis) {
- log.warn("Invalid start timestamp in uptime history: '{}'. Skipping.",
+ logger.warn("Invalid start timestamp in uptime history: '{}'. Skipping.",
uptimeHistoryString);
return null;
}
@@ -81,7 +81,7 @@ public class UptimeHistory implements Comparable<UptimeHistory> {
try {
uptimeHours = Integer.parseInt(parts[2]);
} catch (NumberFormatException e) {
- log.warn("Invalid number format in uptime history: '{}'. Skipping.",
+ logger.warn("Invalid number format in uptime history: '{}'. Skipping.",
uptimeHistoryString);
return null;
}
diff --git a/src/main/java/org/torproject/metrics/onionoo/docs/UptimeStatus.java b/src/main/java/org/torproject/metrics/onionoo/docs/UptimeStatus.java
index 912dd66..b65cc8e 100644
--- a/src/main/java/org/torproject/metrics/onionoo/docs/UptimeStatus.java
+++ b/src/main/java/org/torproject/metrics/onionoo/docs/UptimeStatus.java
@@ -13,7 +13,7 @@ import java.util.TreeSet;
public class UptimeStatus extends Document {
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
UptimeStatus.class);
private transient boolean isDirty = false;
@@ -51,7 +51,7 @@ public class UptimeStatus extends Document {
this.bridgeHistory.add(parsedLine);
}
} else {
- log.error("Could not parse uptime history line '{}'. Skipping.",
+ logger.error("Could not parse uptime history line '{}'. Skipping.",
line);
}
}
diff --git a/src/main/java/org/torproject/metrics/onionoo/docs/WeightsStatus.java b/src/main/java/org/torproject/metrics/onionoo/docs/WeightsStatus.java
index d3783fc..b9a8265 100644
--- a/src/main/java/org/torproject/metrics/onionoo/docs/WeightsStatus.java
+++ b/src/main/java/org/torproject/metrics/onionoo/docs/WeightsStatus.java
@@ -15,7 +15,7 @@ import java.util.TreeMap;
public class WeightsStatus extends Document {
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
WeightsStatus.class);
private transient boolean isDirty = false;
@@ -59,8 +59,8 @@ public class WeightsStatus extends Document {
continue;
}
if (parts.length != 9 && parts.length != 11) {
- log.error("Illegal line '{}' in weights status file. Skipping this "
- + "line.", line);
+ logger.error("Illegal line '{}' in weights status file. Skipping "
+ + "this line.", line);
continue;
}
if (parts[4].equals("NaN")) {
@@ -71,13 +71,13 @@ public class WeightsStatus extends Document {
long validAfterMillis = DateTimeHelper.parse(parts[0] + " " + parts[1]);
long freshUntilMillis = DateTimeHelper.parse(parts[2] + " " + parts[3]);
if (validAfterMillis < 0L || freshUntilMillis < 0L) {
- log.error("Could not parse timestamp while reading "
+ logger.error("Could not parse timestamp while reading "
+ "weights status file. Skipping.");
break;
}
if (validAfterMillis > freshUntilMillis) {
- log.error("Illegal dates in '{}' of weights status file. Skipping.",
- line);
+ logger.error("Illegal dates in '{}' of weights status file. "
+ + "Skipping.", line);
break;
}
long[] interval = new long[] { validAfterMillis, freshUntilMillis };
@@ -92,8 +92,8 @@ public class WeightsStatus extends Document {
weights[6] = parseWeightDouble(parts[10]);
}
} catch (NumberFormatException e) {
- log.error("Could not parse weights values in line '{}' while reading "
- + "weights status file. Skipping.", line);
+ logger.error("Could not parse weights values in line '{}' while "
+ + "reading weights status file. Skipping.", line);
break;
}
this.history.put(interval, weights);
diff --git a/src/main/java/org/torproject/metrics/onionoo/server/NodeIndexer.java b/src/main/java/org/torproject/metrics/onionoo/server/NodeIndexer.java
index b32b1bc..9ba941a 100644
--- a/src/main/java/org/torproject/metrics/onionoo/server/NodeIndexer.java
+++ b/src/main/java/org/torproject/metrics/onionoo/server/NodeIndexer.java
@@ -30,7 +30,7 @@ import javax.servlet.ServletContextListener;
public class NodeIndexer implements ServletContextListener, Runnable {
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
NodeIndexer.class);
@Override
@@ -38,7 +38,7 @@ public class NodeIndexer implements ServletContextListener, Runnable {
File outDir = new File(System.getProperty("onionoo.basedir",
"/srv/onionoo.torproject.org/onionoo"), "out");
if (!outDir.exists() || !outDir.isDirectory()) {
- log.error("\n\n\tOut-dir not found! Expected directory: {}"
+ logger.error("\n\n\tOut-dir not found! Expected directory: {}"
+ "\n\tSet system property 'onionoo.basedir'.", outDir);
System.exit(1);
}
@@ -115,7 +115,7 @@ public class NodeIndexer implements ServletContextListener, Runnable {
}
}
} catch (Throwable th) { // catch all and log
- log.error("Indexing failed: {}", th.getMessage(), th);
+ logger.error("Indexing failed: {}", th.getMessage(), th);
}
}
diff --git a/src/main/java/org/torproject/metrics/onionoo/server/PerformanceMetrics.java b/src/main/java/org/torproject/metrics/onionoo/server/PerformanceMetrics.java
index 2ffd460..22a5573 100644
--- a/src/main/java/org/torproject/metrics/onionoo/server/PerformanceMetrics.java
+++ b/src/main/java/org/torproject/metrics/onionoo/server/PerformanceMetrics.java
@@ -14,7 +14,7 @@ import java.util.TimeZone;
public class PerformanceMetrics {
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
PerformanceMetrics.class);
private static final Object lock = new Object();
@@ -65,19 +65,24 @@ public class PerformanceMetrics {
SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss");
dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- log.info("Request statistics ({}, {} s):",
+ logger.info("Request statistics ({}, {} s):",
dateTimeFormat.format(lastLoggedMillis + LOG_INTERVAL_MILLIS),
LOG_INTERVAL_SECONDS);
- log.info(" Total processed requests: {}", totalProcessedRequests);
- log.info(" Most frequently requested resource: {}",
+ logger.info(" Total processed requests: {}", totalProcessedRequests);
+ logger.info(" Most frequently requested resource: {}",
requestsByResourceType);
- log.info(" Most frequently requested parameter combinations: {}",
+ logger.info(" Most frequently requested parameter combinations: {}",
requestsByParameters);
- log.info(" Matching relays per request: {}", matchingRelayDocuments);
- log.info(" Matching bridges per request: {}", matchingBridgeDocuments);
- log.info(" Written characters per response: {}", writtenChars);
- log.info(" Milliseconds to handle request: {}", handleRequestMillis);
- log.info(" Milliseconds to build response: {}", buildResponseMillis);
+ logger.info(" Matching relays per request: {}",
+ matchingRelayDocuments);
+ logger.info(" Matching bridges per request: {}",
+ matchingBridgeDocuments);
+ logger.info(" Written characters per response: {}",
+ writtenChars);
+ logger.info(" Milliseconds to handle request: {}",
+ handleRequestMillis);
+ logger.info(" Milliseconds to build response: {}",
+ buildResponseMillis);
totalProcessedRequests.clear();
requestsByResourceType.clear();
requestsByParameters.clear();
@@ -94,7 +99,7 @@ public class PerformanceMetrics {
totalProcessedRequests.increment();
long handlingTime = parsedRequestMillis - receivedRequestMillis;
if (handlingTime > DateTimeHelper.ONE_SECOND) {
- log.warn("longer request handling: {} ms for {} params: {} and {} "
+ logger.warn("longer request handling: {} ms for {} params: {} and {} "
+ "chars.", handlingTime, resourceType, parameterKeys,
charsWritten);
}
@@ -106,7 +111,7 @@ public class PerformanceMetrics {
writtenChars.addLong(charsWritten);
long responseTime = writtenResponseMillis - parsedRequestMillis;
if (responseTime > DateTimeHelper.ONE_SECOND) {
- log.warn("longer response building: {} ms for {} params: {} and {} "
+ logger.warn("longer response building: {} ms for {} params: {} and {} "
+ "chars.", responseTime, resourceType, parameterKeys,
charsWritten);
}
diff --git a/src/main/java/org/torproject/metrics/onionoo/server/ServerMain.java b/src/main/java/org/torproject/metrics/onionoo/server/ServerMain.java
index 8bc2fa4..0cab37a 100644
--- a/src/main/java/org/torproject/metrics/onionoo/server/ServerMain.java
+++ b/src/main/java/org/torproject/metrics/onionoo/server/ServerMain.java
@@ -11,21 +11,21 @@ import org.slf4j.LoggerFactory;
public class ServerMain {
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
ServerMain.class);
/** Starts the web server listening for incoming client connections. */
public static void main(String[] args) {
try {
Resource onionooXml = Resource.newSystemResource("jetty.xml");
- log.info("Reading configuration from '{}'.", onionooXml);
+ logger.info("Reading configuration from '{}'.", onionooXml);
XmlConfiguration configuration = new XmlConfiguration(
onionooXml.getInputStream());
Server server = (Server) configuration.configure();
server.start();
server.join();
} catch (Exception ex) {
- log.error("Exiting, because of: {}", ex.getMessage(), ex);
+ logger.error("Exiting, because of: {}", ex.getMessage(), ex);
System.exit(1);
}
}
diff --git a/src/main/java/org/torproject/metrics/onionoo/updater/DescriptorQueue.java b/src/main/java/org/torproject/metrics/onionoo/updater/DescriptorQueue.java
index 8ebae37..972bde8 100644
--- a/src/main/java/org/torproject/metrics/onionoo/updater/DescriptorQueue.java
+++ b/src/main/java/org/torproject/metrics/onionoo/updater/DescriptorQueue.java
@@ -24,7 +24,7 @@ import java.util.TreeMap;
class DescriptorQueue {
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
DescriptorQueue.class);
private File statusDir;
@@ -89,12 +89,12 @@ class DescriptorQueue {
String[] parts = line.split(" ", 2);
excludedFiles.put(parts[1], Long.parseLong(parts[0]));
} catch (NumberFormatException e) {
- log.error("Illegal line '{}' in parse history. Skipping line.",
+ logger.error("Illegal line '{}' in parse history. Skipping line.",
line);
}
}
} catch (IOException e) {
- log.error("Could not read history file '{}'. Not excluding "
+ logger.error("Could not read history file '{}'. Not excluding "
+ "descriptors in this execution.",
this.historyFile.getAbsolutePath(), e);
return;
@@ -109,8 +109,8 @@ class DescriptorQueue {
return;
}
if (null == this.descriptors) {
- log.debug("Not writing history file {}, because we did not read a single "
- + "descriptor from {}.", this.historyFile, this.directory);
+ logger.debug("Not writing history file {}, because we did not read a "
+ + "single descriptor from {}.", this.historyFile, this.directory);
return;
}
SortedMap<String, Long> excludedAndParsedFiles = new TreeMap<>();
@@ -127,8 +127,9 @@ class DescriptorQueue {
bw.write(lastModifiedMillis + " " + absolutePath + "\n");
}
} catch (IOException e) {
- log.error("Could not write history file '{}'. Not excluding descriptors "
- + "in next execution.", this.historyFile.getAbsolutePath());
+ logger.error("Could not write history file '{}'. Not excluding "
+ + "descriptors in next execution.",
+ this.historyFile.getAbsolutePath());
}
}
@@ -142,8 +143,8 @@ class DescriptorQueue {
this.descriptors = this.descriptorReader.readDescriptors(
this.directory).iterator();
} else {
- log.error("Directory {} either does not exist or is not a directory. "
- + "Not adding to descriptor reader.",
+ logger.error("Directory {} either does not exist or is not a "
+ + "directory. Not adding to descriptor reader.",
this.directory.getAbsolutePath());
return null;
}
diff --git a/src/main/java/org/torproject/metrics/onionoo/updater/DescriptorSource.java b/src/main/java/org/torproject/metrics/onionoo/updater/DescriptorSource.java
index 27be94d..22f9127 100644
--- a/src/main/java/org/torproject/metrics/onionoo/updater/DescriptorSource.java
+++ b/src/main/java/org/torproject/metrics/onionoo/updater/DescriptorSource.java
@@ -20,7 +20,7 @@ import java.util.Set;
public class DescriptorSource {
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
DescriptorSource.class);
private final File inDir = new File("in");
@@ -111,29 +111,31 @@ public class DescriptorSource {
* any registered listeners. */
public void readDescriptors() {
this.readArchivedDescriptors();
- log.debug("Reading recent {} ...", DescriptorType.RELAY_SERVER_DESCRIPTORS);
+ logger.debug("Reading recent {} ...",
+ DescriptorType.RELAY_SERVER_DESCRIPTORS);
this.readDescriptors(DescriptorType.RELAY_SERVER_DESCRIPTORS,
DescriptorHistory.RELAY_SERVER_HISTORY, true);
- log.debug("Reading recent {} ...", DescriptorType.RELAY_EXTRA_INFOS);
+ logger.debug("Reading recent {} ...", DescriptorType.RELAY_EXTRA_INFOS);
this.readDescriptors(DescriptorType.RELAY_EXTRA_INFOS,
DescriptorHistory.RELAY_EXTRAINFO_HISTORY, true);
- log.debug("Reading recent {} ...", DescriptorType.EXIT_LISTS);
+ logger.debug("Reading recent {} ...", DescriptorType.EXIT_LISTS);
this.readDescriptors(DescriptorType.EXIT_LISTS,
DescriptorHistory.EXIT_LIST_HISTORY, true);
- log.debug("Reading recent {} ...", DescriptorType.RELAY_CONSENSUSES);
+ logger.debug("Reading recent {} ...", DescriptorType.RELAY_CONSENSUSES);
this.readDescriptors(DescriptorType.RELAY_CONSENSUSES,
DescriptorHistory.RELAY_CONSENSUS_HISTORY, true);
- log.debug("Reading recent {} ...",
+ logger.debug("Reading recent {} ...",
DescriptorType.BRIDGE_SERVER_DESCRIPTORS);
this.readDescriptors(DescriptorType.BRIDGE_SERVER_DESCRIPTORS,
DescriptorHistory.BRIDGE_SERVER_HISTORY, false);
- log.debug("Reading recent {} ...", DescriptorType.BRIDGE_EXTRA_INFOS);
+ logger.debug("Reading recent {} ...", DescriptorType.BRIDGE_EXTRA_INFOS);
this.readDescriptors(DescriptorType.BRIDGE_EXTRA_INFOS,
DescriptorHistory.BRIDGE_EXTRAINFO_HISTORY, false);
- log.debug("Reading recent {} ...", DescriptorType.BRIDGE_STATUSES);
+ logger.debug("Reading recent {} ...", DescriptorType.BRIDGE_STATUSES);
this.readDescriptors(DescriptorType.BRIDGE_STATUSES,
DescriptorHistory.BRIDGE_STATUS_HISTORY, false);
- log.debug("Reading recent {} ...", DescriptorType.BRIDGE_POOL_ASSIGNMENTS);
+ logger.debug("Reading recent {} ...",
+ DescriptorType.BRIDGE_POOL_ASSIGNMENTS);
this.readDescriptors(DescriptorType.BRIDGE_POOL_ASSIGNMENTS,
DescriptorHistory.BRIDGE_POOL_ASSIGNMENTS_HISTORY, false);
}
@@ -154,7 +156,7 @@ public class DescriptorSource {
}
}
}
- log.info("Read recent/{}.", descriptorType.getDir());
+ logger.info("Read recent/{}.", descriptorType.getDir());
}
/** Reads archived descriptors from disk and feeds them into any
@@ -163,7 +165,7 @@ public class DescriptorSource {
if (!this.inArchiveDir.exists()) {
return;
}
- log.info("Reading archived descriptors...");
+ logger.info("Reading archived descriptors...");
this.archiveDescriptorQueue = new DescriptorQueue(this.inArchiveDir,
null, this.statusDir);
this.archiveDescriptorQueue.readHistoryFile(
@@ -204,8 +206,8 @@ public class DescriptorSource {
}
}
if (descriptorType == null) {
- log.warn("Unrecognized descriptor in {} with annotations {}. Skipping "
- + "descriptor.", this.inArchiveDir.getAbsolutePath(),
+ logger.warn("Unrecognized descriptor in {} with annotations {}. "
+ + "Skipping descriptor.", this.inArchiveDir.getAbsolutePath(),
descriptor.getAnnotations());
continue;
}
@@ -215,12 +217,12 @@ public class DescriptorSource {
}
}
this.archiveDescriptorQueue.writeHistoryFile();
- log.info("Read archived descriptors");
+ logger.info("Read archived descriptors");
}
/** Writes parse histories for recent descriptors to disk. */
public void writeHistoryFiles() {
- log.debug("Writing parse histories for recent descriptors...");
+ logger.debug("Writing parse histories for recent descriptors...");
for (DescriptorQueue descriptorQueue : this.recentDescriptorQueues) {
descriptorQueue.writeHistoryFile();
}
diff --git a/src/main/java/org/torproject/metrics/onionoo/updater/LookupService.java b/src/main/java/org/torproject/metrics/onionoo/updater/LookupService.java
index 9a9dad5..32cc112 100644
--- a/src/main/java/org/torproject/metrics/onionoo/updater/LookupService.java
+++ b/src/main/java/org/torproject/metrics/onionoo/updater/LookupService.java
@@ -29,7 +29,7 @@ import java.util.regex.Pattern;
public class LookupService {
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
LookupService.class);
private File geoipDir;
@@ -52,20 +52,20 @@ public class LookupService {
this.geoLite2CityBlocksIPv4CsvFile = new File(this.geoipDir,
"GeoLite2-City-Blocks-IPv4.csv");
if (!this.geoLite2CityBlocksIPv4CsvFile.exists()) {
- log.error("No GeoLite2-City-Blocks-IPv4.csv file in geoip/.");
+ logger.error("No GeoLite2-City-Blocks-IPv4.csv file in geoip/.");
return;
}
this.geoLite2CityLocationsEnCsvFile = new File(this.geoipDir,
"GeoLite2-City-Locations-en.csv");
if (!this.geoLite2CityLocationsEnCsvFile.exists()) {
- log.error("No GeoLite2-City-Locations-en.csv file in "
+ logger.error("No GeoLite2-City-Locations-en.csv file in "
+ "geoip/.");
return;
}
this.geoLite2AsnBlocksIpv4CsvFile = new File(this.geoipDir,
"GeoLite2-ASN-Blocks-IPv4.csv");
if (!this.geoLite2AsnBlocksIpv4CsvFile.exists()) {
- log.error("No GeoLite2-ASN-Blocks-IPv4.csv file in geoip/.");
+ logger.error("No GeoLite2-ASN-Blocks-IPv4.csv file in geoip/.");
return;
}
this.hasAllFiles = true;
@@ -135,7 +135,7 @@ public class LookupService {
while ((line = br.readLine()) != null) {
String[] parts = line.split(",", -1);
if (parts.length < 9) {
- log.error("Illegal line '{}' in {}.", line,
+ logger.error("Illegal line '{}' in {}.", line,
this.geoLite2CityBlocksIPv4CsvFile.getAbsolutePath());
return lookupResults;
}
@@ -144,14 +144,14 @@ public class LookupService {
String startAddressString = networkAddressAndMask[0];
long startIpNum = this.parseAddressString(startAddressString);
if (startIpNum < 0L) {
- log.error("Illegal IP address in '{}' in {}.", line,
+ logger.error("Illegal IP address in '{}' in {}.", line,
this.geoLite2CityBlocksIPv4CsvFile.getAbsolutePath());
return lookupResults;
}
int networkMaskLength = networkAddressAndMask.length < 2 ? 0
: Integer.parseInt(networkAddressAndMask[1]);
if (networkMaskLength < 8 || networkMaskLength > 32) {
- log.error("Missing or illegal network mask in '{}' in {}.", line,
+ logger.error("Missing or illegal network mask in '{}' in {}.", line,
this.geoLite2CityBlocksIPv4CsvFile.getAbsolutePath());
return lookupResults;
}
@@ -173,13 +173,13 @@ public class LookupService {
}
}
} catch (NumberFormatException e) {
- log.error("Number format exception while parsing line '{}' in {}.",
+ logger.error("Number format exception while parsing line '{}' in {}.",
line, this.geoLite2CityBlocksIPv4CsvFile.getAbsolutePath(), e);
return lookupResults;
}
}
} catch (IOException e) {
- log.error("I/O exception while reading {}: {}",
+ logger.error("I/O exception while reading {}: {}",
this.geoLite2CityBlocksIPv4CsvFile.getAbsolutePath(), e);
return lookupResults;
}
@@ -194,7 +194,7 @@ public class LookupService {
while ((line = br.readLine()) != null) {
String[] parts = line.replaceAll("\"", "").split(",", 13);
if (parts.length != 13) {
- log.error("Illegal line '{}' in {}.", line,
+ logger.error("Illegal line '{}' in {}.", line,
this.geoLite2CityLocationsEnCsvFile.getAbsolutePath());
return lookupResults;
}
@@ -205,13 +205,13 @@ public class LookupService {
blockLocations.put(locId, line);
}
} catch (NumberFormatException e) {
- log.error("Number format exception while parsing line '{}' in {}.",
+ logger.error("Number format exception while parsing line '{}' in {}.",
line, this.geoLite2CityLocationsEnCsvFile.getAbsolutePath());
return lookupResults;
}
}
} catch (IOException e) {
- log.error("I/O exception while reading {}: {}",
+ logger.error("I/O exception while reading {}: {}",
this.geoLite2CityLocationsEnCsvFile.getAbsolutePath(), e);
return lookupResults;
}
@@ -228,7 +228,7 @@ public class LookupService {
while ((line = br.readLine()) != null) {
String[] parts = line.replaceAll("\"", "").split(",", 3);
if (parts.length != 3) {
- log.error("Illegal line '{}' in {}.", line,
+ logger.error("Illegal line '{}' in {}.", line,
this.geoLite2AsnBlocksIpv4CsvFile.getAbsolutePath());
return lookupResults;
}
@@ -237,14 +237,14 @@ public class LookupService {
String startAddressString = networkAddressAndMask[0];
long startIpNum = this.parseAddressString(startAddressString);
if (startIpNum < 0L) {
- log.error("Illegal IP address in '{}' in {}.", line,
+ logger.error("Illegal IP address in '{}' in {}.", line,
this.geoLite2AsnBlocksIpv4CsvFile.getAbsolutePath());
return lookupResults;
}
int networkMaskLength = networkAddressAndMask.length < 2 ? 0
: Integer.parseInt(networkAddressAndMask[1]);
if (networkMaskLength < 8 || networkMaskLength > 32) {
- log.error("Missing or illegal network mask in '{}' in {}.", line,
+ logger.error("Missing or illegal network mask in '{}' in {}.", line,
this.geoLite2AsnBlocksIpv4CsvFile.getAbsolutePath());
return lookupResults;
}
@@ -275,13 +275,13 @@ public class LookupService {
break;
}
} catch (NumberFormatException e) {
- log.error("Number format exception while parsing line '{}' in {}.",
+ logger.error("Number format exception while parsing line '{}' in {}.",
line, this.geoLite2AsnBlocksIpv4CsvFile.getAbsolutePath());
return lookupResults;
}
}
} catch (IOException e) {
- log.error("I/O exception while reading {}: {}",
+ logger.error("I/O exception while reading {}: {}",
this.geoLite2AsnBlocksIpv4CsvFile.getAbsolutePath(), e);
return lookupResults;
}
diff --git a/src/main/java/org/torproject/metrics/onionoo/updater/NodeDetailsStatusUpdater.java b/src/main/java/org/torproject/metrics/onionoo/updater/NodeDetailsStatusUpdater.java
index ce809aa..d59c533 100644
--- a/src/main/java/org/torproject/metrics/onionoo/updater/NodeDetailsStatusUpdater.java
+++ b/src/main/java/org/torproject/metrics/onionoo/updater/NodeDetailsStatusUpdater.java
@@ -67,7 +67,7 @@ import java.util.TreeSet;
public class NodeDetailsStatusUpdater implements DescriptorListener,
StatusUpdater {
- private Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
NodeDetailsStatusUpdater.class);
private DescriptorSource descriptorSource;
@@ -403,19 +403,19 @@ public class NodeDetailsStatusUpdater implements DescriptorListener,
@Override
public void updateStatuses() {
this.readNodeStatuses();
- log.info("Read node statuses");
+ logger.info("Read node statuses");
this.startReverseDomainNameLookups();
- log.info("Started reverse domain name lookups");
+ logger.info("Started reverse domain name lookups");
this.lookUpCitiesAndASes();
- log.info("Looked up cities and ASes");
+ logger.info("Looked up cities and ASes");
this.calculatePathSelectionProbabilities();
- log.info("Calculated path selection probabilities");
+ logger.info("Calculated path selection probabilities");
this.computeEffectiveAndExtendedFamilies();
- log.info("Computed effective and extended families");
+ logger.info("Computed effective and extended families");
this.finishReverseDomainNameLookups();
- log.info("Finished reverse domain name lookups");
+ logger.info("Finished reverse domain name lookups");
this.updateNodeDetailsStatuses();
- log.info("Updated node and details statuses");
+ logger.info("Updated node and details statuses");
}
/* Step 2: read node statuses from disk. */
@@ -571,8 +571,7 @@ public class NodeDetailsStatusUpdater implements DescriptorListener,
addressStrings.add(nodeStatus.getAddress());
}
if (addressStrings.isEmpty()) {
- log.error("No relay IP addresses to resolve to cities or "
- + "ASN.");
+ logger.error("No relay IP addresses to resolve to cities or ASN.");
return;
}
SortedMap<String, LookupResult> lookupResults =
@@ -621,7 +620,7 @@ public class NodeDetailsStatusUpdater implements DescriptorListener,
wed = ((double) this.lastBandwidthWeights.get("Wed")) / 10000.0;
}
} else {
- log.debug("Not calculating new path selection probabilities, "
+ logger.debug("Not calculating new path selection probabilities, "
+ "because we could not determine most recent Wxx parameter "
+ "values, probably because we didn't parse a consensus in "
+ "this execution.");
diff --git a/src/main/java/org/torproject/metrics/onionoo/updater/StatusUpdateRunner.java b/src/main/java/org/torproject/metrics/onionoo/updater/StatusUpdateRunner.java
index 65ff859..efbd0d4 100644
--- a/src/main/java/org/torproject/metrics/onionoo/updater/StatusUpdateRunner.java
+++ b/src/main/java/org/torproject/metrics/onionoo/updater/StatusUpdateRunner.java
@@ -10,7 +10,7 @@ import java.io.File;
public class StatusUpdateRunner {
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
StatusUpdateRunner.class);
private LookupService ls;
@@ -37,9 +37,9 @@ public class StatusUpdateRunner {
/** Lets each configured status updater update its status files. */
public void updateStatuses() {
for (StatusUpdater su : this.statusUpdaters) {
- log.debug("Begin update of {}", su.getClass().getSimpleName());
+ logger.debug("Begin update of {}", su.getClass().getSimpleName());
su.updateStatuses();
- log.info("{} updated status files", su.getClass().getSimpleName());
+ logger.info("{} updated status files", su.getClass().getSimpleName());
}
}
@@ -48,14 +48,11 @@ public class StatusUpdateRunner {
for (StatusUpdater su : this.statusUpdaters) {
String statsString = su.getStatsString();
if (statsString != null) {
- LoggerFactory.getLogger("statistics").info("{}\n{}",
- su.getClass().getSimpleName(), statsString);
+ logger.info("{}\n{}", su.getClass().getSimpleName(), statsString);
}
}
- LoggerFactory.getLogger("statistics")
- .info("GeoIP lookup service\n{}", this.ls.getStatsString());
- LoggerFactory.getLogger("statistics")
- .info("Reverse domain name resolver\n{}", this.rdnr.getStatsString());
+ logger.info("GeoIP lookup service\n{}", this.ls.getStatsString());
+ logger.info("Reverse domain name resolver\n{}", this.rdnr.getStatsString());
}
}
diff --git a/src/main/java/org/torproject/metrics/onionoo/util/FormattingUtils.java b/src/main/java/org/torproject/metrics/onionoo/util/FormattingUtils.java
index b1bae46..9f713af 100644
--- a/src/main/java/org/torproject/metrics/onionoo/util/FormattingUtils.java
+++ b/src/main/java/org/torproject/metrics/onionoo/util/FormattingUtils.java
@@ -14,7 +14,7 @@ import java.util.regex.Pattern;
/** Static helper methods for string processing etc. */
public class FormattingUtils {
- private static Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
FormattingUtils.class);
private FormattingUtils() {
@@ -66,7 +66,7 @@ public class FormattingUtils {
mat.appendTail(sb);
return sb.toString();
} catch (Throwable ex) {
- log.debug("Couldn't process input '{}'.", text, ex);
+ logger.debug("Couldn't process input '{}'.", text, ex);
return text;
}
}
diff --git a/src/main/java/org/torproject/metrics/onionoo/writer/BandwidthDocumentWriter.java b/src/main/java/org/torproject/metrics/onionoo/writer/BandwidthDocumentWriter.java
index 18317d9..2715682 100644
--- a/src/main/java/org/torproject/metrics/onionoo/writer/BandwidthDocumentWriter.java
+++ b/src/main/java/org/torproject/metrics/onionoo/writer/BandwidthDocumentWriter.java
@@ -21,7 +21,7 @@ import java.util.SortedSet;
public class BandwidthDocumentWriter implements DocumentWriter {
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
BandwidthDocumentWriter.class);
private DocumentStore documentStore;
@@ -48,7 +48,7 @@ public class BandwidthDocumentWriter implements DocumentWriter {
fingerprint, mostRecentStatusMillis, bandwidthStatus);
this.documentStore.store(bandwidthDocument, fingerprint);
}
- log.info("Wrote bandwidth document files");
+ logger.info("Wrote bandwidth document files");
}
diff --git a/src/main/java/org/torproject/metrics/onionoo/writer/ClientsDocumentWriter.java b/src/main/java/org/torproject/metrics/onionoo/writer/ClientsDocumentWriter.java
index 33b8a99..dcb935c 100644
--- a/src/main/java/org/torproject/metrics/onionoo/writer/ClientsDocumentWriter.java
+++ b/src/main/java/org/torproject/metrics/onionoo/writer/ClientsDocumentWriter.java
@@ -43,7 +43,7 @@ import java.util.SortedSet;
*/
public class ClientsDocumentWriter implements DocumentWriter {
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
ClientsDocumentWriter.class);
private DocumentStore documentStore;
@@ -74,7 +74,7 @@ public class ClientsDocumentWriter implements DocumentWriter {
this.documentStore.store(clientsDocument, hashedFingerprint);
this.writtenDocuments++;
}
- log.info("Wrote clients document files");
+ logger.info("Wrote clients document files");
}
private String[] graphNames = new String[] {
diff --git a/src/main/java/org/torproject/metrics/onionoo/writer/DetailsDocumentWriter.java b/src/main/java/org/torproject/metrics/onionoo/writer/DetailsDocumentWriter.java
index 29d9244..0b9a36e 100644
--- a/src/main/java/org/torproject/metrics/onionoo/writer/DetailsDocumentWriter.java
+++ b/src/main/java/org/torproject/metrics/onionoo/writer/DetailsDocumentWriter.java
@@ -22,7 +22,7 @@ import java.util.TreeSet;
public class DetailsDocumentWriter implements DocumentWriter {
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
DetailsDocumentWriter.class);
private DocumentStore documentStore;
@@ -48,7 +48,7 @@ public class DetailsDocumentWriter implements DocumentWriter {
this.updateBridgeDetailsFile(fingerprint, detailsStatus);
}
}
- log.info("Wrote details document files");
+ logger.info("Wrote details document files");
}
private void updateRelayDetailsFile(String fingerprint,
diff --git a/src/main/java/org/torproject/metrics/onionoo/writer/DocumentWriterRunner.java b/src/main/java/org/torproject/metrics/onionoo/writer/DocumentWriterRunner.java
index 99b627e..963b648 100644
--- a/src/main/java/org/torproject/metrics/onionoo/writer/DocumentWriterRunner.java
+++ b/src/main/java/org/torproject/metrics/onionoo/writer/DocumentWriterRunner.java
@@ -12,7 +12,7 @@ import org.slf4j.LoggerFactory;
public class DocumentWriterRunner {
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
DocumentWriterRunner.class);
private DocumentWriter[] documentWriters;
@@ -34,7 +34,7 @@ public class DocumentWriterRunner {
public void writeDocuments() {
long mostRecentStatusMillis = retrieveMostRecentStatusMillis();
for (DocumentWriter dw : this.documentWriters) {
- log.debug("Writing {}", dw.getClass().getSimpleName());
+ logger.debug("Writing {}", dw.getClass().getSimpleName());
dw.writeDocuments(mostRecentStatusMillis);
}
}
@@ -56,7 +56,7 @@ public class DocumentWriterRunner {
for (DocumentWriter dw : this.documentWriters) {
String statsString = dw.getStatsString();
if (statsString != null) {
- log.info("{}\n{}", dw.getClass().getSimpleName(), statsString);
+ logger.info("{}\n{}", dw.getClass().getSimpleName(), statsString);
}
}
}
diff --git a/src/main/java/org/torproject/metrics/onionoo/writer/SummaryDocumentWriter.java b/src/main/java/org/torproject/metrics/onionoo/writer/SummaryDocumentWriter.java
index bcdb370..5975c6c 100644
--- a/src/main/java/org/torproject/metrics/onionoo/writer/SummaryDocumentWriter.java
+++ b/src/main/java/org/torproject/metrics/onionoo/writer/SummaryDocumentWriter.java
@@ -19,7 +19,7 @@ import java.util.SortedSet;
public class SummaryDocumentWriter implements DocumentWriter {
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
SummaryDocumentWriter.class);
private DocumentStore documentStore;
@@ -108,7 +108,7 @@ public class SummaryDocumentWriter implements DocumentWriter {
this.writtenDocuments++;
}
}
- log.info("Wrote summary document files");
+ logger.info("Wrote summary document files");
}
@Override
diff --git a/src/main/java/org/torproject/metrics/onionoo/writer/UptimeDocumentWriter.java b/src/main/java/org/torproject/metrics/onionoo/writer/UptimeDocumentWriter.java
index f03b730..28ed9fd 100644
--- a/src/main/java/org/torproject/metrics/onionoo/writer/UptimeDocumentWriter.java
+++ b/src/main/java/org/torproject/metrics/onionoo/writer/UptimeDocumentWriter.java
@@ -26,7 +26,7 @@ import java.util.TreeSet;
public class UptimeDocumentWriter implements DocumentWriter {
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
UptimeDocumentWriter.class);
private DocumentStore documentStore;
@@ -52,7 +52,7 @@ public class UptimeDocumentWriter implements DocumentWriter {
for (String fingerprint : updatedUptimeStatuses) {
this.updateDocument(fingerprint, mostRecentStatusMillis, uptimeStatus);
}
- log.info("Wrote uptime document files");
+ logger.info("Wrote uptime document files");
}
private int writtenDocuments = 0;
diff --git a/src/main/java/org/torproject/metrics/onionoo/writer/WeightsDocumentWriter.java b/src/main/java/org/torproject/metrics/onionoo/writer/WeightsDocumentWriter.java
index ceda9ef..cfd1123 100644
--- a/src/main/java/org/torproject/metrics/onionoo/writer/WeightsDocumentWriter.java
+++ b/src/main/java/org/torproject/metrics/onionoo/writer/WeightsDocumentWriter.java
@@ -21,7 +21,7 @@ import java.util.SortedSet;
public class WeightsDocumentWriter implements DocumentWriter {
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
WeightsDocumentWriter.class);
private DocumentStore documentStore;
@@ -49,7 +49,7 @@ public class WeightsDocumentWriter implements DocumentWriter {
fingerprint, history, mostRecentStatusMillis);
this.documentStore.store(weightsDocument, fingerprint);
}
- log.info("Wrote weights document files");
+ logger.info("Wrote weights document files");
}
private String[] graphNames = new String[] {
diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml
deleted file mode 100644
index d61be28..0000000
--- a/src/main/resources/logback.xml
+++ /dev/null
@@ -1,82 +0,0 @@
-<configuration debug="false">
-
- <!-- a path and a prefix -->
- <property name="logfile-base" value="${LOGBASE}/onionoo-" />
-
- <!-- log file names -->
- <property name="fileall-logname" value="${logfile-base}all" />
- <property name="fileerr-logname" value="${logfile-base}err" />
- <property name="filestatistics-logname" value="${logfile-base}statistics" />
-
- <!-- date pattern -->
- <property name="utc-date-pattern" value="%date{ISO8601, UTC}" />
-
- <!-- appender section -->
- <appender name="FILEALL" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${fileall-logname}.log</file>
- <encoder>
- <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
- </encoder>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <!-- rollover daily -->
- <FileNamePattern>${fileall-logname}.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
- <maxHistory>10</maxHistory>
- <timeBasedFileNamingAndTriggeringPolicy
- class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
- <!-- or whenever the file size reaches 1MB -->
- <maxFileSize>1MB</maxFileSize>
- </timeBasedFileNamingAndTriggeringPolicy>
- </rollingPolicy>
- </appender>
-
- <appender name="FILEERR" class="ch.qos.logback.core.FileAppender">
- <file>${fileerr-logname}.log</file>
- <encoder>
- <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
- </encoder>
-
- <!-- ERROR or worse -->
- <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
- <level>ERROR</level>
- </filter>
- </appender>
-
- <appender name="FILESTATISTICS" class="ch.qos.logback.core.FileAppender">
- <file>${filestatistics-logname}.log</file>
- <encoder>
- <pattern>${utc-date-pattern} %msg%n</pattern>
- </encoder>
-
- <!-- only INFO level -->
- <filter class="ch.qos.logback.classic.filter.LevelFilter">
- <level>INFO</level>
- <onMatch>ACCEPT</onMatch>
- <onMismatch>DENY</onMismatch>
- </filter>
- </appender>
-
- <!-- logger section -->
- <logger name="org.torproject" >
- <appender-ref ref="FILEERR" />
- </logger>
-
- <logger name="org.eclipse" level="INFO" />
-
- <logger name="org.torproject.metrics.onionoo.cron.Main" >
- <appender-ref ref="FILESTATISTICS" />
- </logger>
-
- <logger name="org.torproject.metrics.onionoo.server.PerformanceMetrics" >
- <appender-ref ref="FILESTATISTICS" />
- </logger>
-
- <logger name="statistics" >
- <appender-ref ref="FILESTATISTICS" />
- </logger>
-
- <root level="ALL">
- <appender-ref ref="FILEALL" />
- </root>
-
-</configuration>
-
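
With the bundled logback.xml removed, the application no longer ships its own appender and routing setup; unless a configuration is supplied at deployment time, logback presumably falls back to its built-in default console output. A minimal sketch of one way an operator might point logback at an external configuration file follows. The logback.configurationFile system property is a standard logback feature, but its use here and the /etc/onionoo/logback.xml path are assumptions, not part of this commit.

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public final class LoggingBootstrapExample {

      public static void main(String[] args) {
        // Equivalent to starting the JVM with
        //   -Dlogback.configurationFile=/etc/onionoo/logback.xml
        // The path is hypothetical; the property must be set before the
        // first logger is created so logback reads the external file.
        System.setProperty("logback.configurationFile",
            "/etc/onionoo/logback.xml");

        Logger logger = LoggerFactory.getLogger(LoggingBootstrapExample.class);
        logger.info("Logging configured from external file.");
      }
    }
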
_______________________________________________
tor-commits mailing list
tor-commits@xxxxxxxxxxxxxxxxxxxx
https://lists.torproject.org/cgi-bin/mailman/listinfo/tor-commits