
[tor-commits] [stem/master] Removed DescriptorParser



commit 9ad3395400cd21a26c0106e665aa6e1e2026a353
Author: Ravi Chandra Padmala <neenaoffline@xxxxxxxxx>
Date:   Thu Aug 9 13:01:44 2012 +0530

    Removed DescriptorParser
---
 stem/descriptor/__init__.py            |  306 +++++++++++++-------------------
 stem/descriptor/networkstatus.py       |  123 +++++++-------
 test/integ/descriptor/networkstatus.py |   40 +++--
 3 files changed, 214 insertions(+), 255 deletions(-)
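
The removed DescriptorParser kept parsing state (a current line plus the remaining lines) on an object; the helpers that replace it below are stateless and operate directly on a file-like object or a plain list of lines. A minimal standalone sketch of the peek-then-read pattern those helpers build on, mirroring the _peek_line function added in this patch (Python 2, like the codebase; not stem's public API):

    # remember the current offset, read one line, then seek back so the
    # caller still sees the same line on its next read
    from StringIO import StringIO

    def peek_line(descriptor_file):
      last_position = descriptor_file.tell()
      line = descriptor_file.readline()
      descriptor_file.seek(last_position)
      return line

    content = StringIO("network-status-version 3\nvote-status consensus\n")
    assert peek_line(content) == "network-status-version 3\n"
    assert content.readline() == "network-status-version 3\n"  # offset was restored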

diff --git a/stem/descriptor/__init__.py b/stem/descriptor/__init__.py
index 168b357..40f03ad 100644
--- a/stem/descriptor/__init__.py
+++ b/stem/descriptor/__init__.py
@@ -148,6 +148,21 @@ class Descriptor(object):
   def __str__(self):
     return self._raw_contents
 
+def _peek_line(descriptor_file):
+  """
+  Returns the line at the current offset of descriptor_file.
+  
+  :param file descriptor_file: file with the descriptor content
+  
+  :returns: line at the current offset of descriptor_file
+  """
+  
+  last_position = descriptor_file.tell()
+  line = descriptor_file.readline()
+  descriptor_file.seek(last_position)
+  
+  return line
+
 def _peek_keyword(descriptor_file):
   """
   Returns the keyword at the current offset of descriptor_file. Respects the
@@ -172,6 +187,82 @@ def _peek_keyword(descriptor_file):
   
   return keyword
 
+def _read_keyword_line(keyword, descriptor_file, validate = True, optional = False):
+  """
+  Returns the rest of the line if its keyword matches the given keyword. If it
+  doesn't, a ValueError is raised when validation is enabled and the keyword
+  isn't optional; otherwise None is returned.
+  
+  Respects the opt prefix: "opt <keyword>" lines match the keyword, and unrecognized "opt" lines are skipped.
+  
+  :param str keyword: keyword the line must begin with
+  :param file descriptor_file: file with the descriptor content
+  :param bool validate: if True, a missing non-optional keyword raises a ValueError
+  :param bool optional: if True, the keyword isn't required to be present
+  
+  :returns: the text after the keyword if the keyword matches the one provided, otherwise returns None or raises an exception
+  
+  :raises: ValueError if a non-optional keyword doesn't match when validation is enabled
+  """
+  
+  line = _peek_line(descriptor_file)
+  if not line:
+    if not optional and validate:
+      raise ValueError("Unexpected end of document")
+    return None
+  
+  if line_matches_keyword(keyword, line):
+    line = descriptor_file.readline()
+    
+    if line == "opt " + keyword or line == keyword: return ""
+    elif line.startswith("opt "): return line.split(" ", 2)[2].rstrip("\n")
+    else: return line.split(" ", 1)[1].rstrip("\n")
+  elif line.startswith("opt "):
+    # if this is something new we don't recognize
+    # ignore it and go to the next line
+    descriptor_file.readline()
+    return _read_keyword_line(keyword, descriptor_file, validate, optional)
+  elif not optional and validate:
+    raise ValueError("Error parsing network status document: Expected %s, received: %s" % (keyword, line))
+  else: return None
+
+def _read_keyword_line_str(keyword, lines, validate = True, optional = False):
+  """
+  Returns the rest of the line if its keyword matches the given keyword. If it
+  doesn't, a ValueError is raised when validation is enabled and the keyword
+  isn't optional; otherwise None is returned.
+  
+  Respects the opt prefix: "opt <keyword>" lines match the keyword, and unrecognized "opt" lines are skipped.
+  
+  :param str keyword: keyword the line must begin with
+  :param list lines: list of strings to be read from
+  :param bool validate: if True, a missing non-optional keyword raises a ValueError
+  :param bool optional: if True, the keyword isn't required to be present
+  
+  :returns: the text after the keyword if the keyword matches the one provided, otherwise returns None or raises an exception
+  
+  :raises: ValueError if a non-optional keyword doesn't match when validation is enabled
+  """
+  
+  if not lines:
+    if not optional and validate:
+      raise ValueError("Unexpected end of document")
+    return
+  
+  if line_matches_keyword(keyword, lines[0]):
+    line = lines.pop(0)
+    
+    if line == "opt " + keyword or line == keyword: return ""
+    elif line.startswith("opt "): return line.split(" ", 2)[2]
+    else: return line.split(" ", 1)[1]
+  elif lines[0].startswith("opt "):
+    # if this is something new we don't recognize yet
+    # ignore it and go to the next line
+    lines.pop(0)
+    return _read_keyword_line_str(keyword, lines, validate, optional)
+  elif not optional and validate:
+    raise ValueError("Error parsing network status document: Expected %s, received: %s" % (keyword, lines[0]))
+  else: return None
+
 def _read_until_keywords(keywords, descriptor_file, inclusive = False, ignore_first = False):
   """
   Reads from the descriptor file until we get to one of the given keywords or reach the
@@ -348,146 +439,10 @@ def _strptime(string, validate = True, optional = False):
     return datetime.datetime.strptime(string, "%Y-%m-%d %H:%M:%S")
   except ValueError, exc:
     if validate or not optional: raise exc
+    else: return None
 
-class DescriptorParser:
-  """
-  Helper class to parse documents.
-  
-  :var str line: current line to be being parsed
-  :var list lines: list of remaining lines to be parsed
-  """
-  
-  def __init__(self, raw_content, validate):
-    """
-    Create a new DocumentParser.
-    
-    :param str raw_content: content to be parsed
-    :param bool validate: if False, treats every keyword line as optional
-    """
-    
-    self._raw_content = raw_content
-    self.lines = raw_content.split("\n")
-    self.validate = validate
-    self.line = self.lines.pop(0)
-    
-  def peek_keyword(self):
-    """
-    Returns the first keyword in the next line. Respects the opt keyword and
-    returns the actual keyword if the first is "opt".
-    
-    :returns: the first keyword of the next line
-    """
-    
-    if self.line:
-      if self.line.startswith("opt "):
-        return self.line.split(" ")[1]
-      return self.line.split(" ")[0]
-  
-  def read_keyword_line(self, keyword, optional = False):
-    """
-    Returns the first keyword in the next line it matches the given keyword.
-    
-    If it doesn't match, a ValueError is raised if optional is True and if the
-    DocumentParser was created with validation enabled. If not, None is returned.
-    
-    Respects the opt keyword and returns the next keyword if the first is "opt".
-    
-    :param str keyword: keyword the line must begin with
-    :param bool optional: If the current line must begin with the given keyword
-    
-    :returns: the text after the keyword if the keyword matches the one provided, otherwise returns None or raises an exception
-    
-    :raises: ValueError if a non-optional keyword doesn't match when validation is enabled
-    """
-    
-    keyword_regex = re.compile("(opt )?" + re.escape(keyword) + "($| )")
-    
-    if not self.line:
-      if not optional and self.validate:
-        raise ValueError("Unexpected end of document")
-      return
-    
-    if keyword_regex.match(self.line):
-      line = self.read_line()
-      
-      if line == "opt " + keyword or line == keyword: return ""
-      elif line.startswith("opt "): return line.split(" ", 2)[2]
-      else: return line.split(" ", 1)[1]
-    elif self.line.startswith("opt"):
-      # if this is something new we don't recognize
-      # ignore it and go to the next line
-      self.read_line()
-      return self.read_keyword_line(self, keyword, optional)
-    elif not optional and self.validate:
-      raise ValueError("Error parsing network status document: Expected %s, received: %s" % (keyword, self.line))
-  
-  def read_line(self):
-    """
-    Returns the current line and shifts the parser to the next line.
-    
-    :returns: the current line if it exists, None otherwise
-    """
-    
-    if self.line:
-      try: tmp, self.line = self.line, self.lines.pop(0)
-      except IndexError: tmp, self.line = self.line, None
-      
-      return tmp
-    elif not optional and self.validate:
-      raise ValueError("Unexpected end of document")
-  
-  def read_block(self, keyword):
-    """
-    Returns a keyword block that begins with "-----BEGIN keyword-----\\n" and
-    ends with "-----END keyword-----\\n".
-    
-    :param str keyword: keyword block that must be read
-    
-    :returns: the data in the keyword block
-    """
-    
-    lines = []
-    
-    if self.line == "-----BEGIN " + keyword + "-----":
-      self.read_line()
-      while self.line != "-----END " + keyword + "-----":
-        lines.append(self.read_line())
-    
-    self.read_line() # pop out the END line
-    
-    return "\n".join(lines)
-  
-  def read_until(self, terminals = []):
-    """
-    Returns the data in the parser until a line that begins with one of the keywords in terminals are found.
-    
-    :param list terminals: list of strings at which we should stop reading and return the data
-    
-    :returns: the current line if it exists, None otherwise
-    """
-    
-    if self.line == None: return
-    lines = [self.read_line()]
-    while self.line and not self.line.split(" ")[0] in terminals:
-      lines.append(self.line)
-      self.read_line()
-    
-    return "\n".join(lines)
-  
-  def remaining(self):
-    """
-    Returns the data remaining in the parser.
-    
-    :returns: all a list of all unparsed lines
-    """
-    
-    if self.line:
-      lines, self.lines = self.lines, []
-      lines.insert(0, self.line)
-      self.line = None
-      return lines
-    else:
-      return []
+def line_matches_keyword(keyword, line):
+  return re.search("^(opt )?" + re.escape(keyword) + "($| )", line)
 
 class KeyCertificate(Descriptor):
   """
@@ -522,63 +477,54 @@ class KeyCertificate(Descriptor):
     self.fingerprint, self.identity_key, self.published = None, None, None
     self.expires, self.signing_key, self.crosscert = None, None, None
     self.certification = None
-    parser = DescriptorParser(raw_content, validate)
-    peek_check_kw = lambda keyword: keyword == parser.peek_keyword()
+    content = raw_content.splitlines()
     seen_keywords = set()
     
-    self.key_certificate_version = parser.read_keyword_line("dir-key-certificate-version")
-    if validate and self.key_certificate_version != "3": raise ValueError("Unrecognized dir-key-certificate-version")
+    self.key_certificate_version = _read_keyword_line_str("dir-key-certificate-version", content)
+    if validate and self.key_certificate_version != "3":
+      raise ValueError("Unrecognized dir-key-certificate-version")
     
-    def _read_keyword_line(keyword):
+    def read_keyword_line(keyword):
       if validate and keyword in seen_keywords:
         raise ValueError("Invalid key certificate: '%s' appears twice" % keyword)
       seen_keywords.add(keyword)
-      return parser.read_keyword_line(keyword)
+      return _read_keyword_line_str(keyword, content, validate)
     
-    while parser.line:
-      if peek_check_kw("dir-address"):
-        line = _read_keyword_line("dir-address")
+    while content:
+      if line_matches_keyword("dir-address", content[0]):
+        line = read_keyword_line("dir-address")
         try:
           self.ip, self.port = line.rsplit(":", 1)
           self.port = int(self.port)
         except Exception:
           if validate: raise ValueError("Invalid dir-address line: %s" % line)
-      
-      elif peek_check_kw("fingerprint"):
-        self.fingerprint = _read_keyword_line("fingerprint")
-      
-      elif peek_check_kw("dir-identity-key"):
-        _read_keyword_line("dir-identity-key")
-        self.identity_key = parser.read_block("RSA PUBLIC KEY")
-      
-      elif peek_check_kw("dir-key-published"):
-        self.published = _strptime(_read_keyword_line("dir-key-published"))
-      
-      elif peek_check_kw("dir-key-expires"):
-        self.expires = _strptime(_read_keyword_line("dir-key-expires"))
-      
-      elif peek_check_kw("dir-signing-key"):
-        _read_keyword_line("dir-signing-key")
-        self.signing_key = parser.read_block("RSA PUBLIC KEY")
-      
-      elif peek_check_kw("dir-key-crosscert"):
-        _read_keyword_line("dir-key-crosscert")
-        self.crosscert = parser.read_block("ID SIGNATURE")
-      
-      elif peek_check_kw("dir-key-certification"):
-        _read_keyword_line("dir-key-certification")
-        self.certification = parser.read_block("SIGNATURE")
+      elif line_matches_keyword("fingerprint", content[0]):
+        self.fingerprint = read_keyword_line("fingerprint")
+      elif line_matches_keyword("dir-identity-key", content[0]):
+        read_keyword_line("dir-identity-key")
+        self.identity_key = _get_pseudo_pgp_block(content)
+      elif line_matches_keyword("dir-key-published", content[0]):
+        self.published = _strptime(read_keyword_line("dir-key-published"))
+      elif line_matches_keyword("dir-key-expires", content[0]):
+        self.expires = _strptime(read_keyword_line("dir-key-expires"))
+      elif line_matches_keyword("dir-signing-key", content[0]):
+        read_keyword_line("dir-signing-key")
+        self.signing_key = _get_pseudo_pgp_block(content)
+      elif line_matches_keyword("dir-key-crosscert", content[0]):
+        read_keyword_line("dir-key-crosscert")
+        self.crosscert = _get_pseudo_pgp_block(content)
+      elif line_matches_keyword("dir-key-certification", content[0]):
+        read_keyword_line("dir-key-certification")
+        self.certification = _get_pseudo_pgp_block(content)
         break
-      
       elif validate:
-        raise ValueError("Key certificate contains unrecognized lines: %s" % parser.line)
-      
+        raise ValueError("Key certificate contains unrecognized lines: %s" % content[0])
       else:
         # ignore unrecognized lines if we aren't validating
-        self._unrecognized_lines.append(parser.read_line())
+        self.unrecognized_lines.append(content.pop(0))
     
-    self._unrecognized_lines = parser.remaining()
-    if self._unrecognized_lines and validate:
+    self.unrecognized_lines = content
+    if self.unrecognized_lines and validate:
       raise ValueError("Unrecognized trailing data in key certificate")
   
   def get_unrecognized_lines(self):
@@ -588,5 +534,5 @@ class KeyCertificate(Descriptor):
     :returns: a list of unrecognized lines
     """
     
-    return self._unrecognized_lines
+    return self.unrecognized_lines
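
In practice the new keyword handling reduces to a regex check against the head of a line list plus a pop. A simplified, self-contained version of that pattern (the real _read_keyword_line_str above additionally handles validation, missing keywords, and skipping unrecognized "opt" lines):

    import re

    def line_matches_keyword(keyword, line):
      # same expression as the helper above: optional "opt " prefix, then the keyword
      return re.search("^(opt )?" + re.escape(keyword) + "($| )", line)

    def read_keyword_line_str(keyword, lines):
      # simplified sketch: assumes the keyword is present and followed by a value
      line = lines.pop(0)
      if line.startswith("opt "):
        return line.split(" ", 2)[2]
      return line.split(" ", 1)[1]

    lines = ["fingerprint 27B6B5996C426270A5C95488AA5BCEB6BCC86956"]
    assert line_matches_keyword("fingerprint", lines[0])
    assert read_keyword_line_str("fingerprint", lines) == "27B6B5996C426270A5C95488AA5BCEB6BCC86956"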
 
diff --git a/stem/descriptor/networkstatus.py b/stem/descriptor/networkstatus.py
index a51fcbd..b1dc4dd 100644
--- a/stem/descriptor/networkstatus.py
+++ b/stem/descriptor/networkstatus.py
@@ -40,13 +40,18 @@ The documents can be obtained from any of the following sources...
 
 import re
 import datetime
-from StringIO import StringIO
+
+try:
+  from cStringIO import StringIO
+except ImportError:
+  from StringIO import StringIO
 
 import stem.descriptor
 import stem.version
 import stem.exit_policy
 
-from stem.descriptor import _read_until_keywords, _skip_until_keywords, _peek_keyword
+from stem.descriptor import _read_until_keywords, _skip_until_keywords, _peek_keyword, _strptime
+from stem.descriptor import _read_keyword_line, _read_keyword_line_str, _get_pseudo_pgp_block, _peek_line
 
 _bandwidth_weights_regex = re.compile(" ".join(["W%s=\d+" % weight for weight in ["bd",
   "be", "bg", "bm", "db", "eb", "ed", "ee", "eg", "em", "gb", "gd", "gg", "gm", "mb", "md", "me", "mg", "mm"]]))
@@ -80,13 +85,6 @@ def parse_file(document_file, validate = True):
   document.router_descriptors = _router_desc_generator(document_file, document.vote_status == "vote", validate)
   return document.router_descriptors
 
-def _strptime(string, validate = True, optional = False):
-  try:
-    return datetime.datetime.strptime(string, "%Y-%m-%d %H:%M:%S")
-  except ValueError, exc:
-    if validate or not optional: raise exc
-    else: return None
-
 def _router_desc_generator(document_file, vote, validate):
   while _peek_keyword(document_file) == "r":
     desc_content = "".join(_read_until_keywords(_router_desc_end_kws, document_file, False, True))
@@ -171,14 +169,13 @@ class NetworkStatusDocument(stem.descriptor.Descriptor):
     :returns: a list of unrecognized trailing lines
     """
     
-    return self._unrecognized_lines
+    return self.unrecognized_lines
   
   def _parse(self, raw_content):
     # preamble
+    content = StringIO(raw_content)
     validate = self.validated
-    doc_parser = stem.descriptor.DescriptorParser(raw_content, validate)
-    
-    read_keyword_line = lambda keyword, optional = False: setattr(self, keyword.replace("-", "_"), doc_parser.read_keyword_line(keyword, optional))
+    read_keyword_line = lambda keyword, optional = False: setattr(self, keyword.replace("-", "_"), _read_keyword_line(keyword, content, validate, optional))
     
     map(read_keyword_line, ["network-status-version", "vote-status"])
     if validate and not self._validate_network_status_version():
@@ -186,47 +183,49 @@ class NetworkStatusDocument(stem.descriptor.Descriptor):
     
     if self.vote_status == "vote": vote = True
     elif self.vote_status == "consensus": vote = False
-    elif validate: raise ValueError("Unrecognized document type specified in vote-status")
+    elif validate: raise ValueError("Unrecognized vote-status")
     
     if vote:
       read_keyword_line("consensus-methods", True)
       self.consensus_methods = [int(method) for method in self.consensus_methods.split(" ")]
-      self.published = _strptime(doc_parser.read_keyword_line("published", True), validate, True)
+      self.published = _strptime(_read_keyword_line("published", content, validate, True), validate, True)
     else:
-      self.consensus_method = int(doc_parser.read_keyword_line("consensus-method", True))
+      read_keyword_line("consensus-method", True)
+      self.consensus_method = int(self.consensus_method)
     
     map(read_keyword_line, ["valid-after", "fresh-until", "valid-until"])
     self.valid_after = _strptime(self.valid_after, validate)
     self.fresh_until = _strptime(self.fresh_until, validate)
     self.valid_until = _strptime(self.valid_until, validate)
-    voting_delay = doc_parser.read_keyword_line("voting-delay")
+    voting_delay = _read_keyword_line("voting-delay", content, validate)
     self.vote_delay, self.dist_delay = [int(delay) for delay in voting_delay.split(" ")]
     
-    client_versions = doc_parser.read_keyword_line("client-versions", True)
+    client_versions = _read_keyword_line("client-versions", content, validate, True)
     if client_versions:
       self.client_versions = [stem.version.Version(version_string) for version_string in client_versions.split(",")]
-    server_versions = doc_parser.read_keyword_line("server-versions", True)
+    server_versions = _read_keyword_line("server-versions", content, validate, True)
     if server_versions:
       self.server_versions = [stem.version.Version(version_string) for version_string in server_versions.split(",")]
-    self.known_flags = doc_parser.read_keyword_line("known-flags").split(" ")
+    self.known_flags = _read_keyword_line("known-flags", content, validate).split(" ")
     read_keyword_line("params", True)
     if self.params:
       self.params = dict([(param.split("=")[0], int(param.split("=")[1])) for param in self.params.split(" ")])
     
     # authority section
-    while doc_parser.line.startswith("dir-source "):
-      dirauth_data = doc_parser.read_until(["dir-source", "r", "directory-footer", "directory-signature", "bandwidth-weights"])
+    while _peek_keyword(content) == "dir-source":
+      dirauth_data = _read_until_keywords(["dir-source", "r", "directory-footer", "directory-signature", "bandwidth-weights"], content, False, True)
+      dirauth_data = "".join(dirauth_data).rstrip()
       self.directory_authorities.append(DirectoryAuthority(dirauth_data, vote, validate))
     
     # router descriptors
-    if doc_parser.peek_keyword() == "r":
-      router_descriptors_data = doc_parser.read_until(["bandwidth-weights", "directory-footer", "directory-signature"])
+    if _peek_keyword(content) == "r":
+      router_descriptors_data = "".join(_read_until_keywords(["bandwidth-weights", "directory-footer", "directory-signature"], content, False, True))
       self.router_descriptors = _router_desc_generator(StringIO(router_descriptors_data), vote, validate)
     
     # footer section
     if self.consensus_method > 9 or vote and filter(lambda x: x >= 9, self.consensus_methods):
-      if doc_parser.line == "directory-footer":
-        doc_parser.read_line()
+      if _peek_keyword(content) == "directory-footer":
+        content.readline()
       elif validate:
         raise ValueError("Network status document missing directory-footer")
     
@@ -237,12 +236,12 @@ class NetworkStatusDocument(stem.descriptor.Descriptor):
       elif validate:
         raise ValueError("Invalid bandwidth-weights line")
     
-    while doc_parser.line.startswith("directory-signature "):
-      signature_data = doc_parser.read_until(["directory-signature"])
-      self.directory_signatures.append(DirectorySignature(signature_data))
+    while _peek_keyword(content) == "directory-signature":
+      signature_data = _read_until_keywords(["directory-signature"], content, False, True)
+      self.directory_signatures.append(DirectorySignature("".join(signature_data)))
     
-    self._unrecognized_lines = doc_parser.remaining()
-    if validate and self._unrecognized_lines: raise ValueError("Unrecognized trailing data")
+    self.unrecognized_lines = content.read()
+    if validate and self.unrecognized_lines: raise ValueError("Unrecognized trailing data")
 
 class DirectoryAuthority(stem.descriptor.Descriptor):
   """
@@ -280,21 +279,21 @@ class DirectoryAuthority(stem.descriptor.Descriptor):
     self.nickname, self.identity, self.address, self.ip = None, None, None, None
     self.dirport, self.orport, self.legacy_dir_key = None, None, None
     self.key_certificate, self.contact, self.vote_digest = None, None, None
-    parser = stem.descriptor.DescriptorParser(raw_content, validate)
     
-    dir_source = parser.read_keyword_line("dir-source")
+    content = StringIO(raw_content)
+    dir_source = _read_keyword_line("dir-source", content, validate)
     self.nickname, self.identity, self.address, self.ip, self.dirport, self.orport = dir_source.split(" ")
     self.dirport = int(self.dirport)
     self.orport = int(self.orport)
     
-    self.contact = parser.read_keyword_line("contact")
+    self.contact = _read_keyword_line("contact", content, validate)
     if vote:
-      self.legacy_dir_key = parser.read_keyword_line("legacy-dir-key", True)
-      self.key_certificate = stem.descriptor.KeyCertificate("\n".join(parser.remaining()), validate)
+      self.legacy_dir_key = _read_keyword_line("legacy-dir-key", content, validate, True)
+      self.key_certificate = stem.descriptor.KeyCertificate(content.read(), validate)
     else:
-      self.vote_digest = parser.read_keyword_line("vote-digest", True)
-    self._unrecognized_lines = parser.remaining()
-    if self._unrecognized_lines and validate:
+      self.vote_digest = _read_keyword_line("vote-digest", content, validate, True)
+    self.unrecognized_lines = content.read()
+    if self.unrecognized_lines and validate:
       raise ValueError("Unrecognized trailing data in directory authority information")
   
   def get_unrecognized_lines(self):
@@ -304,7 +303,7 @@ class DirectoryAuthority(stem.descriptor.Descriptor):
     :returns: a list of unrecognized lines
     """
     
-    return self._unrecognized_lines
+    return self.unrecognized_lines
 
 class DirectorySignature(stem.descriptor.Descriptor):
   """
@@ -329,18 +328,20 @@ class DirectorySignature(stem.descriptor.Descriptor):
     
     super(DirectorySignature, self).__init__(raw_content)
     self.identity, self.key_digest, self.method, self.signature = None, None, None, None
-    parser = stem.descriptor.DescriptorParser(raw_content, validate)
+    content = raw_content.splitlines()
     
-    signature_line = parser.read_keyword_line("directory-signature").split(" ")
+    signature_line = _read_keyword_line_str("directory-signature", content, validate).split(" ")
     
     if len(signature_line) == 2:
       self.identity, self.key_digest = signature_line
-    if len(signature_line) == 3: # for microdescriptor consensuses
+    if len(signature_line) == 3:
+      # for microdescriptor consensuses
+      # this 'method' field appears to be undocumented (as of 2012-08-08)
       self.method, self.identity, self.key_digest = signature_line
     
-    self.signature = parser.read_block("SIGNATURE")
-    self._unrecognized_lines = parser.remaining()
-    if self._unrecognized_lines and validate:
+    self.signature = _get_pseudo_pgp_block(content)
+    self.unrecognized_lines = content
+    if self.unrecognized_lines and validate:
       raise ValueError("Unrecognized trailing data in directory signature")
   
   def get_unrecognized_lines(self):
@@ -350,7 +351,7 @@ class DirectorySignature(stem.descriptor.Descriptor):
     :returns: a list of unrecognized lines
     """
     
-    return self._unrecognized_lines
+    return self.unrecognized_lines
 
 class RouterDescriptor(stem.descriptor.Descriptor):
   """
@@ -446,26 +447,26 @@ class RouterDescriptor(stem.descriptor.Descriptor):
     :raises: ValueError if an error occurs in validation
     """
     
-    parser = stem.descriptor.DescriptorParser(raw_content, validate)
+    content = StringIO(raw_content)
     seen_keywords = set()
-    peek_check_kw = lambda keyword: keyword == parser.peek_keyword()
+    peek_check_kw = lambda keyword: keyword == _peek_keyword(content)
     
-    r = parser.read_keyword_line("r")
+    r = _read_keyword_line("r", content, validate)
     # r mauer BD7xbfsCFku3+tgybEZsg8Yjhvw itcuKQ6PuPLJ7m/Oi928WjO2j8g 2012-06-22 13:19:32 80.101.105.103 9001 0
     # "r" SP nickname SP identity SP digest SP publication SP IP SP ORPort SP DirPort NL
-    seen_keywords.add("r")
     if r:
+      seen_keywords.add("r")
       values = r.split(" ")
       self.nickname, self.identity, self.digest = values[0], values[1], values[2]
       self.publication = _strptime(" ".join((values[3], values[4])), validate)
       self.ip, self.orport, self.dirport = values[5], int(values[6]), int(values[7])
       if self.dirport == 0: self.dirport = None
-    elif validate: raise ValueError("Invalid router descriptor: empty 'r' line" )
+    elif validate: raise ValueError("Invalid router descriptor: empty 'r' line")
     
-    while parser.line:
+    while _peek_line(content):
       if peek_check_kw("s"):
         if "s" in seen_keywords: raise ValueError("Invalid router descriptor: 's' line appears twice")
-        line = parser.read_keyword_line("s")
+        line = _read_keyword_line("s", content, validate)
         if not line: continue
         seen_keywords.add("s")
         # s Named Running Stable Valid
@@ -494,7 +495,7 @@ class RouterDescriptor(stem.descriptor.Descriptor):
       
       elif peek_check_kw("v"):
         if "v" in seen_keywords: raise ValueError("Invalid router descriptor: 'v' line appears twice")
-        line = parser.read_keyword_line("v", True)
+        line = _read_keyword_line("v", content, validate, True)
         seen_keywords.add("v")
         # v Tor 0.2.2.35
         if line:
@@ -506,7 +507,7 @@ class RouterDescriptor(stem.descriptor.Descriptor):
       
       elif peek_check_kw("w"):
         if "w" in seen_keywords: raise ValueError("Invalid router descriptor: 'w' line appears twice")
-        w = parser.read_keyword_line("w", True)
+        w = _read_keyword_line("w", content, validate, True)
         # "w" SP "Bandwidth=" INT [SP "Measured=" INT] NL
         seen_keywords.add("w")
         if w:
@@ -525,7 +526,7 @@ class RouterDescriptor(stem.descriptor.Descriptor):
       
       elif peek_check_kw("p"):
         if "p" in seen_keywords: raise ValueError("Invalid router descriptor: 'p' line appears twice")
-        p = parser.read_keyword_line("p", True)
+        p = _read_keyword_line("p", content, validate, True)
         seen_keywords.add("p")
         # "p" SP ("accept" / "reject") SP PortList NL
         if p:
@@ -533,17 +534,17 @@ class RouterDescriptor(stem.descriptor.Descriptor):
       
       elif vote and peek_check_kw("m"):
         # microdescriptor hashes
-        m = parser.read_keyword_line("m", True)
+        m = _read_keyword_line("m", content, validate, True)
         methods, digests = m.split(" ", 1)
         method_list = methods.split(",")
         digest_dict = [digest.split("=", 1) for digest in digests.split(" ")]
         self.microdescriptor_hashes.append((method_list, digest_dict))
       
       elif validate:
-        raise ValueError("Router descriptor contains unrecognized trailing lines: %s" % parser.line)
+        raise ValueError("Router descriptor contains unrecognized trailing lines: %s" % content.readline())
       
       else:
-        self._unrecognized_lines.append(parser.read_line()) # ignore unrecognized lines if we aren't validating
+        self.unrecognized_lines.append(content.readline()) # ignore unrecognized lines if we aren't validating
   
   def get_unrecognized_lines(self):
     """
@@ -552,5 +553,5 @@ class RouterDescriptor(stem.descriptor.Descriptor):
     :returns: a list of unrecognized lines
     """
     
-    return self._unrecognized_lines
+    return self.unrecognized_lines
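
NetworkStatusDocument._parse now walks the raw consensus through a StringIO, peeking at the next keyword to decide which section it is in and slicing sections off with _read_until_keywords. A rough standalone illustration of that loop shape, using simplified stand-ins for stem's private helpers and made-up sample lines:

    try:
      from cStringIO import StringIO  # same fallback this patch adds
    except ImportError:
      from StringIO import StringIO

    def peek_keyword(descriptor_file):
      # look at the next line's keyword without consuming it, honoring "opt"
      last_position = descriptor_file.tell()
      line = descriptor_file.readline()
      descriptor_file.seek(last_position)
      if line.startswith("opt "):
        return line.split(" ")[1].rstrip("\n")
      return line.split(" ")[0].rstrip("\n")

    content = StringIO(
      "dir-source moria1 FINGERPRINT 128.31.0.39 128.31.0.39 9131 9101\n"
      "contact Roger Dingledine\n"
      "dir-source tor26 FINGERPRINT 86.59.21.38 86.59.21.38 80 443\n"
      "contact Peter Palfrader\n"
      "r example-router\n")

    authority_sections = []
    while peek_keyword(content) == "dir-source":
      section = [content.readline()]
      while peek_keyword(content) and peek_keyword(content) not in ("dir-source", "r"):
        section.append(content.readline())
      authority_sections.append("".join(section))  # would become a DirectoryAuthority

    assert len(authority_sections) == 2
    assert peek_keyword(content) == "r"  # router status entries come next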
 
diff --git a/test/integ/descriptor/networkstatus.py b/test/integ/descriptor/networkstatus.py
index 67992c7..f5d8942 100644
--- a/test/integ/descriptor/networkstatus.py
+++ b/test/integ/descriptor/networkstatus.py
@@ -39,8 +39,8 @@ class TestNetworkStatusDocument(unittest.TestCase):
     count = 0
     with open(descriptor_path) as descriptor_file:
       for desc in stem.descriptor.networkstatus.parse_file(descriptor_file):
-        if resource.getrusage(resource.RUSAGE_SELF).ru_maxrss > 100000:
-          # if we're using > 100 MB we should fail
+        if resource.getrusage(resource.RUSAGE_SELF).ru_maxrss > 200000:
+          # if we're using > 200 MB we should fail
           self.fail()
         assert desc.nickname # check that the router has a nickname
         count += 1
@@ -129,9 +129,11 @@ class TestNetworkStatusDocument(unittest.TestCase):
         }
     self.assertEquals(expected_bandwidth_weights, desc.bandwidth_weights)
     
-    expected_signature = """HFXB4497LzESysYJ/4jJY83E5vLjhv+igIxD9LU6lf6ftkGeF+lNmIAIEKaMts8H
+    expected_signature = """-----BEGIN SIGNATURE-----
+HFXB4497LzESysYJ/4jJY83E5vLjhv+igIxD9LU6lf6ftkGeF+lNmIAIEKaMts8H
 mfWcW0b+jsrXcJoCxV5IrwCDF3u1aC3diwZY6yiG186pwWbOwE41188XI2DeYPwE
-I/TJmV928na7RLZe2mGHCAW3VQOvV+QkCfj05VZ8CsY="""
+I/TJmV928na7RLZe2mGHCAW3VQOvV+QkCfj05VZ8CsY=
+-----END SIGNATURE-----"""
     self.assertEquals(8, len(desc.directory_signatures))
     self.assertEquals("14C131DFC5C6F93646BE72FA1401C02A8DF2E8B4", desc.directory_signatures[0].identity)
     self.assertEquals("BF112F1C6D5543CFD0A32215ACABD4197B5279AD", desc.directory_signatures[0].key_digest)
@@ -203,7 +205,8 @@ I/TJmV928na7RLZe2mGHCAW3VQOvV+QkCfj05VZ8CsY="""
     self.assertEquals("Mike Perry <email>", desc.directory_authorities[0].contact)
     self.assertEquals(None, desc.directory_authorities[0].legacy_dir_key)
     
-    expected_identity_key = """MIIBigKCAYEA6uSmsoxj2MiJ3qyZq0qYXlRoG8o82SNqg+22m+t1c7MlQOZWPJYn
+    expected_identity_key = """-----BEGIN RSA PUBLIC KEY-----
+MIIBigKCAYEA6uSmsoxj2MiJ3qyZq0qYXlRoG8o82SNqg+22m+t1c7MlQOZWPJYn
 XeMcBCt8xrTeIt2ZI+Q/Kt2QJSeD9WZRevTKk/kn5Tg2+xXPogalUU47y5tUohGz
 +Q8+CxtRSXpDxBHL2P8rLHvGrI69wbNHGoQkce/7gJy9vw5Ie2qzbyXk1NG6V8Fb
 pr6A885vHo6TbhUnolz2Wqt/kN+UorjLkN2H3fV+iGcQFv42SyHYGDLa0WwL3PJJ
@@ -211,21 +214,28 @@ r/veu36S3VaHBrfhutfioi+d3d4Ya0bKwiWi5Lm2CHuuRTgMpHLU9vlci8Hunuxq
 HsULe2oMsr4VEic7sW5SPC5Obpx6hStHdNv1GxoSEm3/vIuPM8pINpU5ZYAyH9yO
 Ef22ZHeiVMMKmpV9TtFyiFqvlI6GpQn3mNbsQqF1y3XCA3Q4vlRAkpgJVUSvTxFP
 2bNDobOyVCpCM/rwxU1+RCNY5MFJ/+oktUY+0ydvTen3gFdZdgNqCYjKPLfBNm9m
-RGL7jZunMUNvAgMBAAE="""
-    expected_signing_key = """MIGJAoGBAJ5itcJRYNEM3Qf1OVWLRkwjqf84oXPc2ZusaJ5zOe7TVvBMra9GNyc0
+RGL7jZunMUNvAgMBAAE=
+-----END RSA PUBLIC KEY-----"""
+    expected_signing_key = """-----BEGIN RSA PUBLIC KEY-----
+MIGJAoGBAJ5itcJRYNEM3Qf1OVWLRkwjqf84oXPc2ZusaJ5zOe7TVvBMra9GNyc0
 NM9y6zVkHCAePAjr4KbW/8P1olA6FUE2LV9bozaU1jFf6K8B2OELKs5FUEW+n+ic
-GM0x6MhngyXonWOcKt5Gj+mAu5lrno9tpNbPkz2Utr/Pi0nsDhWlAgMBAAE="""
-    expected_key_crosscert = """RHYImGTwg36wmEdAn7qaRg2sAfql7ZCtPIL/O3lU5OIdXXp0tNn/K00Bamqohjk+
+GM0x6MhngyXonWOcKt5Gj+mAu5lrno9tpNbPkz2Utr/Pi0nsDhWlAgMBAAE=
+-----END RSA PUBLIC KEY-----"""
+    expected_key_crosscert = """-----BEGIN ID SIGNATURE-----
+RHYImGTwg36wmEdAn7qaRg2sAfql7ZCtPIL/O3lU5OIdXXp0tNn/K00Bamqohjk+
 Tz4FKsKXGDlbGv67PQcZPOK6NF0GRkNh4pk89prrDO4XwtEn7rkHHdBH6/qQ7IRG
-GdDZHtZ1a69oFZvPWD3hUaB50xeIe7GoKdKIfdNNJ+8="""
-    expected_key_certification = """fasWOGyUZ3iMCYpDfJ+0JcMiTH25sXPWzvlHorEOyOMbaMqRYpZU4GHzt1jLgdl6
+GdDZHtZ1a69oFZvPWD3hUaB50xeIe7GoKdKIfdNNJ+8=
+-----END ID SIGNATURE-----"""
+    expected_key_certification = """-----BEGIN SIGNATURE-----
+fasWOGyUZ3iMCYpDfJ+0JcMiTH25sXPWzvlHorEOyOMbaMqRYpZU4GHzt1jLgdl6
 AAoR6KdamsLg5VE8xzst48a4UFuzHFlklZ5O8om2rcvDd5DhSnWWYZnYJecqB+bo
 dNisPmaIVSAWb29U8BpNRj4GMC9KAgGYUj8aE/KtutAeEekFfFEHTfWZ2fFp4j3m
 9rY8FWraqyiF+Emq1T8pAAgMQ+79R3oZxq0TXS42Z4Anhms735ccauKhI3pDKjbl
 tD5vAzIHOyjAOXj7a6jY/GrnaBNuJ4qe/4Hf9UmzK/jKKwG95BPJtPTT4LoFwEB0
 KG2OUeQUNoCck4nDpsZwFqPlrWCHcHfTV2iDYFV1HQWDTtZz/qf+GtB8NXsq+I1w
 brADmvReM2BD6p/13h0QURCI5hq7ZYlIKcKrBa0jn1d9cduULl7vgKsRCJDls/ID
-emBZ6pUxMpBmV0v+PrA3v9w4DlE7GHAq61FF/zju2kpqj6MInbEvI/E+e438sWsL"""
+emBZ6pUxMpBmV0v+PrA3v9w4DlE7GHAq61FF/zju2kpqj6MInbEvI/E+e438sWsL
+-----END SIGNATURE-----"""
     self.assertEquals("3", desc.directory_authorities[0].key_certificate.key_certificate_version)
     self.assertEquals("27B6B5996C426270A5C95488AA5BCEB6BCC86956", desc.directory_authorities[0].key_certificate.fingerprint)
     self.assertEquals(_strptime("2011-11-28 21:51:04"), desc.directory_authorities[0].key_certificate.published)
@@ -237,9 +247,11 @@ emBZ6pUxMpBmV0v+PrA3v9w4DlE7GHAq61FF/zju2kpqj6MInbEvI/E+e438sWsL"""
     self.assertEquals(None, desc.directory_authorities[0].vote_digest)
     self.assertEquals({}, desc.bandwidth_weights)
     
-    expected_signature = """fskXN84wB3mXfo+yKGSt0AcDaaPuU3NwMR3ROxWgLN0KjAaVi2eV9PkPCsQkcgw3
+    expected_signature = """-----BEGIN SIGNATURE-----
+fskXN84wB3mXfo+yKGSt0AcDaaPuU3NwMR3ROxWgLN0KjAaVi2eV9PkPCsQkcgw3
 JZ/1HL9sHyZfo6bwaC6YSM9PNiiY6L7rnGpS7UkHiFI+M96VCMorvjm5YPs3FioJ
-DnN5aFtYKiTc19qIC7Nmo+afPdDEf0MlJvEOP5EWl3w="""
+DnN5aFtYKiTc19qIC7Nmo+afPdDEf0MlJvEOP5EWl3w=
+-----END SIGNATURE-----"""
     self.assertEquals(1, len(desc.directory_signatures))
     self.assertEquals("27B6B5996C426270A5C95488AA5BCEB6BCC86956", desc.directory_signatures[0].identity)
     self.assertEquals("D5C30C15BB3F1DA27669C2D88439939E8F418FCF", desc.directory_signatures[0].key_digest)



_______________________________________________
tor-commits mailing list
tor-commits@xxxxxxxxxxxxxxxxxxxx
https://lists.torproject.org/cgi-bin/mailman/listinfo/tor-commits