Drop function _sort_entries

 Using sorted() creates a copy of the list; list.sort() sorts in place,
 hence a tiny bit faster.
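
 For context, a minimal sketch of the distinction the message describes
 (Post is an illustrative stand-in for blogit's Entry, not code from the
 repository):

    import operator
    from datetime import date

    # stand-in for blogit's Entry; only the .date attribute matters here
    class Post:
        def __init__(self, d):
            self.date = d

    entries = [Post(date(2016, 1, d)) for d in (3, 1, 2)]

    # sorted() allocates and returns a new list, leaving `entries` untouched:
    newest_first = sorted(entries, key=operator.attrgetter('date'), reverse=True)

    # list.sort() reorders `entries` itself and returns None, skipping the
    # extra allocation -- the "tiny bit faster" the message refers to:
    entries.sort(key=operator.attrgetter('date'), reverse=True)

    assert [p.date for p in entries] == [p.date for p in newest_first]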
Oz N Tiram 8 years ago
parent
commit
6b0d773070
2 changed files with 24 additions and 21 deletions
  1. blogit/blogit.py  +13 −11
  2. tests/test_all.py  +11 −10

+ 13 - 11
blogit/blogit.py

@@ -134,7 +134,9 @@ class Tag(object):
         """Render html page and atom feed"""
         context = GLOBAL_TEMPLATE_CONTEXT.copy()
         context['tag'] = self
-        context['entries'] = _sort_entries(self.entries)
+        entries = list(self.entries)
+        entries.sort(key=operator.attrgetter('date'), reverse=True)
+        context['entries'] = entries
 
         # render html page
         render_to = os.path.join(CONFIG['output_to'], 'tags', self.slug)
@@ -305,11 +307,6 @@ class Entry(object):
                 sys.exit(1)
 
 
-def _sort_entries(entries, reversed=True):
-    """Sort all entries by date and reverse the list"""
-    return list(sorted(entries, key=operator.attrgetter('date'), reverse=reversed))
-
-
 def _render(context, template_path, output_path, encoding='utf-8'):
     template = jinja_env.get_template(template_path)
     html = template.render(context)
@@ -359,11 +356,14 @@ def find_new_posts_and_pages(db):
 
 
 def _get_last_entries(db, qty):
+    """get all entries and the last qty entries"""
     eids = [post.eid for post in db.posts.all()]
     eids = sorted(eids, reverse=True)
     entries = [Entry(os.path.join(CONFIG['content_root'],
                      db.posts.get(eid=eid)['filename']), eid) for eid in eids]
-    return _sort_entries(entries)[:qty]
+    # return _sort_entries(entries)[:qty]
+    entries.sort(key=operator.attrgetter('date'), reverse=True)
+    return entries[:qty], entries
 
 
 def update_index(entries):
@@ -408,7 +408,8 @@ def build(config):
     # to the index using BeautifulSoup
     # update index
     logger.info("Updating index")
-    update_index(_get_last_entries(DB, config['INDEX_SIZE']))
+    last_entries, all_entries = _get_last_entries(DB, config['INDEX_SIZE'])
+    update_index(last_entries)
 
     # update archive
     logger.info("Updating archive")
@@ -417,10 +418,11 @@ def build(config):
     # to the archive using BeautifulSoup
 
     entries = [Entry.entry_from_db(
-        os.path.join(CONFIG['content_root'], e.get('filename')), e.eid) for e in
-        DB.posts.all()]
+               os.path.join(CONFIG['content_root'], e.get('filename')), e.eid) for e in
+               DB.posts.all()]
 
-    render_archive(_sort_entries(entries, reversed=True)[config['ARCHIVE_SIZE']:])
+    all_entries.reverse()
+    render_archive(all_entries[config['ARCHIVE_SIZE']:])
 
 
 def preview():  # pragma: no coverage
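
The heart of the change in build(): _get_last_entries now returns a pair, so
the full, already-sorted list can be reused for the archive instead of being
re-sorted. Condensed from the hunks above (context omitted):

    # new contract: (newest `qty` entries, all entries sorted newest-first)
    last_entries, all_entries = _get_last_entries(DB, config['INDEX_SIZE'])
    update_index(last_entries)

    # reuse the sorted list for the archive; reverse() flips it oldest-first
    all_entries.reverse()
    render_archive(all_entries[config['ARCHIVE_SIZE']:])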

+ 11 - 10
tests/test_all.py

@@ -2,7 +2,7 @@ import os
 import sys
 import pytest
 from bs4 import BeautifulSoup
-from tinydb import Query, where
+from tinydb import where
 
 sys.path.insert(0, os.getcwd())
 from conf import CONFIG
@@ -20,13 +20,12 @@ CONFIG['content_root'] = 'test_root'
 ARCHIVE_SIZE = 10
 
 from blogit.blogit import (find_new_posts_and_pages, DataBase,
-                           Entry, Tag, _sort_entries, _get_last_entries,
+                           Entry, Tag, _get_last_entries,
                            render_archive, update_index, build)
 
 import blogit.blogit as m
 
 
-
 DB = DataBase(os.path.join(CONFIG['content_root'], 'blogit.db'))
 
 # monkey patch to local DB
@@ -105,6 +104,7 @@ some more content
 """)
 f.close()
 
+
 def write_file(i):
     f = open((os.path.join(CONFIG['content_root'],
                            'post{0:03d}.md'.format(i))), 'w')
@@ -133,6 +133,7 @@ def test_find_new_posts_and_pages():
     foo = DB.tags.search(where('name')=='foo')
     assert foo[0]['post_ids'] == list(range(1, 16))
 
+
 def test_tags():
     entries = [
             Entry.entry_from_db(os.path.join(CONFIG['content_root'],
@@ -156,7 +157,6 @@ def test_tags():
         list(new_tag.entries)
 
 
-
 def test_slug():
 
     t = Tag('foo:bar')
@@ -194,8 +194,9 @@ def test_tag_entries():
     assert len(entries)
 """
 
+
 def test_tag_post_ids():
-    m ="""\
+    m = """\
 ---
 title: Blog post {}
 author: Famous author
@@ -223,8 +224,6 @@ summary: This is a summary
     e1.render()
     [t.render() for t in e1.tags]
 
-    l = _sort_entries([e2, e1])
-    assert l == [e2, e1]
     assert len(DB.posts.all()) == 22
 
 
@@ -243,10 +242,11 @@ def test_tag_render():
 
     assert len(DB.posts.all()) == 22
 
+
 def test_get_last_entries():
 
     assert len(DB.posts.all()) == 22
-    le = _get_last_entries(DB, 10)
+    le, all = _get_last_entries(DB, 10)
     assert [e.id for e in le] == list(range(22, 12, -1))
 
 
@@ -256,7 +256,7 @@ def test_render_archive():
         os.path.join(CONFIG['content_root'], e.get('filename')), e.eid) for e in
         DB.posts.all()]
 
-    render_archive(_sort_entries(entries, reversed=True)[ARCHIVE_SIZE:])
+    render_archive(entries[ARCHIVE_SIZE:])
     # pages should not be in the archive
     with open(os.path.join(CONFIG['output_to'], 'archive', 'index.html')) as html_index:
         soup = BeautifulSoup(html_index.read(), 'html.parser')
@@ -264,7 +264,8 @@ def test_render_archive():
 
 
 def test_render_index():
-    update_index(_get_last_entries(DB, 10))
+    le, all_entries = _get_last_entries(DB, 10)
+    update_index(le)
     with open(os.path.join(CONFIG['output_to'], 'index.html')) as html_index:
         soup = BeautifulSoup(html_index.read(), 'html.parser')
         assert len(soup.find_all(class_='clearfix entry')) == 10