Browse Source

100% tests pass and the blog outputs look fine

Oz N Tiram 9 years ago
parent
commit
9a689b98af
3 changed files with 20 additions and 5 deletions
  1. 1 1
      blogit/blogit.py
  2. 1 1
      templates/archive_index.html
  3. 18 3
      tests/test_all.py

+ 1 - 1
blogit/blogit.py

@@ -381,7 +381,7 @@ def build():
                 for tag in post.tags:
                     tag.posts = [post_id]
                     tags[tag.name] = tag
-        entries.append(post)
+                entries.append(post)
         print("%s" % post.path)
 
     for name, to in tags.iteritems():

+ 1 - 1
templates/archive_index.html

@@ -4,7 +4,7 @@
 <div class="archive index">
 <h2>This is an archive of everything posted here ...</h2>
   {% for entry in entries %}
-    <p>{{ entry.publish_date}} <a title="../{{ entry.permalink }}" href="../{{entry.permalink}}">{{entry.title }}</a></p>
+    <p class="post">{{ entry.publish_date}} <a title="../{{ entry.permalink }}" href="../{{entry.permalink}}">{{entry.title }}</a></p>
   {% endfor %}
 </div>
 {% endblock content %}

+ 18 - 3
tests/test_all.py

@@ -1,6 +1,7 @@
 import os
 
 import pytest
+from bs4 import BeautifulSoup
 from tinydb import Query, where
 
 from blogit.blogit import (CONFIG, find_new_posts_and_pages, DataBase,
@@ -243,13 +244,27 @@ def test_render_archive():
         DB.posts.all()]
 
     render_archive(_sort_entries(entries, reversed=True)[ARCHIVE_SIZE:])
-    # TODO: assertions here
+    # pages should not be in the archive
+    with open(os.path.join(CONFIG['output_to'], 'archive', 'index.html')) as html_index:
+        soup = BeautifulSoup(html_index.read(), 'html.parser')
+        assert len(soup.find_all(class_='post')) == 12
 
 
-def test_render_archive():
+def test_render_index():
     update_index(_get_last_entries(DB))
-    # TODO: assertions here
+    with open(os.path.join(CONFIG['output_to'], 'index.html')) as html_index:
+        soup = BeautifulSoup(html_index.read(), 'html.parser')
+        assert len(soup.find_all(class_='clearfix entry')) == 10
 
 
 def test_build():
     build()
+    # check that the index really contains the last 10 entries
+    with open(os.path.join(CONFIG['output_to'], 'index.html')) as html_index:
+        soup = BeautifulSoup(html_index.read(), 'html.parser')
+        assert len(soup.find_all(class_='clearfix entry')) == 10
+
+    # pages should not be in the archive, but archive size here is different
+    with open(os.path.join(CONFIG['output_to'], 'archive', 'index.html')) as html_index:
+        soup = BeautifulSoup(html_index.read(), 'html.parser')
+        assert len(soup.find_all(class_='post')) == 22