Oz N Tiram 10 лет назад
Родитель
Commit
5255609a6e
2 измененных файлов с 12 добавлено и 5 удалено
  1. 1 1
      blogit/blogit.py
  2. 11 4
      tests/test_all.py

+ 1 - 1
blogit/blogit.py

@@ -209,7 +209,7 @@ class Entry(object):
         try:
         try:
             self.prepare()
             self.prepare()
         except KeyError as E:
         except KeyError as E:
-            import pdb; pdb.set_trace()
+            pass
 
 
     def __str__(self):
     def __str__(self):
         return self.path
         return self.path

+ 11 - 4
tests/test_all.py

@@ -44,7 +44,7 @@ tags: {tags}
 public: yes
 public: yes
 chronological: yes
 chronological: yes
 kind: writing
 kind: writing
-summary: This is a summry of post {number}. Donec id elit non mi porta gravida at eget metus. Fusce dapibus
+summary: This is a summary of post {number}. Donec id elit non mi porta gravida at eget metus. Fusce dapibus
 ---
 ---
 
 
 This is the body of post {number}. Donec id elit non mi porta gravida at eget metus. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Etiam porta sem malesuada magna mollis euismod. Donec sed odio dui.
 This is the body of post {number}. Donec id elit non mi porta gravida at eget metus. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Etiam porta sem malesuada magna mollis euismod. Donec sed odio dui.
@@ -194,7 +194,7 @@ published: 2015-01-{}
 public: yes
 public: yes
 chronological: yes
 chronological: yes
 kind: writing
 kind: writing
-summary: This is a summry
+summary: This is a summary
 ---
 ---
 """
 """
     with open(os.path.join(CONFIG['content_root'], 'e.md'), 'w') as f:
     with open(os.path.join(CONFIG['content_root'], 'e.md'), 'w') as f:
@@ -269,5 +269,12 @@ def test_build():
         soup = BeautifulSoup(html_index.read(), 'html.parser')
         soup = BeautifulSoup(html_index.read(), 'html.parser')
         assert len(soup.find_all(class_='post')) == 12
         assert len(soup.find_all(class_='post')) == 12
 
 
-    # todo, check tag pages
-    # tag pages are not built properly
+    with open(os.path.join(CONFIG['output_to'], 'tags', 'foo', 'index.html')) as tag_foo:
+        soup = BeautifulSoup(tag_foo.read(), 'html.parser')
+        titles = [c.a.string for c in
+                  soup.find_all(class_="clearfix entry")]
+        for title, idx in zip(titles, range(15, 0, -1)):
+            assert title.split()[-1] == str(idx)
+
+
+