# test_all.py — blogit test suite
import os
import pytest
from bs4 import BeautifulSoup
from tinydb import Query, where
from blogit.blogit import (CONFIG, find_new_posts_and_pages, DataBase,
                           Entry, Tag, _sort_entries, _get_last_entries,
                           render_archive, update_index, build)
import blogit.blogit as m

# Run the whole suite against an isolated content tree, not the real blog.
CONFIG['content_root'] = 'test_root'
ARCHIVE_SIZE = 10

# A database left over from a previous run means stale state: wipe the
# entire test content root so every run starts from scratch.
db_name = os.path.join(CONFIG['content_root'], 'blogit.db')
if os.path.exists(db_name):
    import shutil
    shutil.rmtree(CONFIG['content_root'])
if not os.path.exists(CONFIG['content_root']):
    os.mkdir(CONFIG['content_root'])

DB = DataBase(os.path.join(CONFIG['content_root'], 'blogit.db'))

# monkey patch to local DB
m.DB = DB
Tag.table = DB.tags
Tag.db = DB
Entry.db = DB

# The five tag names rotated across the generated posts (see write_file below).
tags = ['foo', 'bar', 'baz', 'bug', 'buf']
  24. shift = lambda l, n: l[-n:] + l[:-n]
# Front-matter + markdown body template for the generated test posts.
# ``{number}`` and ``{tags}`` are filled in per post by write_file(); the
# fenced bash/python snippets exercise the renderer's code-block handling.
# NOTE(review): "summray" is a typo inside the template text, kept verbatim
# because tests may compare rendered output against it.
post = '''\
---
title: Blog post {number}
author: Famous author
published: 2015-01-{number}
tags: {tags}
public: yes
chronological: yes
kind: writing
summary: This is a summray of post {number}. Donec id elit non mi porta gravida at eget metus. Fusce dapibus
---
This is the body of post {number}. Donec id elit non mi porta gravida at eget metus. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Etiam porta sem malesuada magna mollis euismod. Donec sed odio dui.

This is a snippet in bash

```bash
$ for i in `seq 1 10`; do
    echo $i
done
VAR="variable"
echo $VAR
# This is a very long long long long long long long long long long comment
```

This is a snippet in python

```python
def yay(top):
    for i in range(1, top+1):
        yield i

for i in yay:
    print(i)
```
'''
# The content root may already exist (it is created during module setup
# above); EAFP — attempt the mkdir and ignore the "already exists" error.
try:
    os.mkdir(CONFIG['content_root'])
except OSError:
    pass
  59. shift_factors = list([(x - 1) // 5 +1 for x in range(1,21)])
  60. f = open((os.path.join(CONFIG['content_root'],
  61. 'page.md')), 'w')
  62. f.write("""\
  63. ---
  64. title: example page
  65. public: yes
  66. kind: page
  67. template: about.html
  68. ---
  69. # some heading
  70. content paragraph
  71. ## heading 2
  72. some more content
  73. """)
  74. f.close()
  75. def write_file(i):
  76. f = open((os.path.join(CONFIG['content_root'],
  77. 'post{}.md'.format(i))), 'w')
  78. f.write(post.format(**{'number': i,
  79. 'tags':
  80. ','.join(shift(tags, shift_factors[i-1])[:-1])}))
  81. [write_file(i) for i in range(1, 21)]
  82. def test_find_new_posts_and_pages():
  83. entries = [e for e in find_new_posts_and_pages(DB)]
  84. assert len(entries)
  85. pages = [e[1] for e in entries if str(e[0]).endswith('page.md')]
  86. assert len(pages)
  87. assert len(DB.posts.all()) == 20
  88. new_entries = [e for e in find_new_posts_and_pages(DB)]
  89. # no new posts sould be found
  90. assert len(DB.posts.all()) == 20
  91. assert len(new_entries) == 0
  92. [e[0].tags for e in entries]
  93. foo = DB.tags.search(where('name')=='foo')
  94. assert foo[0]['post_ids'] == list(range(1, 16))
  95. def test_tags():
  96. entries = [
  97. Entry.entry_from_db(os.path.join(CONFIG['content_root'],
  98. e.get('filename')), e.eid) for e in DB.posts.all()]
  99. tags = DB.tags.all()
  100. t = entries[0].tags
  101. assert len(t) == 4
  102. assert t[0].name == 'buf'
  103. new_tag = Tag('buggg')
  104. new_tag.posts = [100,100]
  105. with pytest.raises(ValueError):
  106. new_tag.posts = "This should not work"
  107. with pytest.raises(ValueError):
  108. new_tag.posts = 1 # This should not either
  109. new_tag.posts = [100]
  110. with pytest.raises(ValueError):
  111. list(new_tag.entries)
  112. def test_slug():
  113. t = Tag('foo:bar')
  114. assert t.slug == "foo-bar"
  115. t = Tag('foo:;bar,.,baz')
  116. assert t.slug == "foo-bar-baz"
  117. """
  118. def test_tag_posts():
  119. example = Tag('example')
  120. example.posts = [1,2,3]
  121. assert [1,2,3] == example.posts
  122. Filter = Query()
  123. t = DB.tags.get(Filter.post_ids == [1, 2, 3])
  124. assert t['post_ids'] == [1, 2, 3]
  125. example = Tag('example')
  126. example.posts = [4,5,6]
  127. rv = DB.tags.search(where('name') == 'example')
  128. assert rv[0]['post_ids'] == range(1, 7)
  129. def test_tag_entries():
  130. t = Tag('breaks')
  131. t.posts = [10000]
  132. with pytest.raises(ValueError):
  133. list(t.entries)
  134. tf = Tag(u'example')
  135. entries = list(tf.entries)
  136. assert len(entries)
  137. """
  138. def test_tag_post_ids():
  139. m ="""\
  140. ---
  141. title: Blog post {}
  142. author: Famous author
  143. published: 2015-01-{}
  144. tags: tag1, tag2
  145. public: yes
  146. chronological: yes
  147. kind: writing
  148. summary: This is a summary
  149. ---
  150. """
  151. assert len(DB.posts.all()) == 20
  152. with open(os.path.join(CONFIG['content_root'], 'e.md'), 'w') as f:
  153. f.write(m.format(25, 25))
  154. with open(os.path.join(CONFIG['content_root'], 'f.md'), 'w') as f:
  155. f.write(m.format(27, 27))
  156. e1 = Entry(os.path.join(CONFIG['content_root'], 'e.md'))
  157. e1.tags
  158. e2 = Entry(os.path.join(CONFIG['content_root'], 'f.md'))
  159. e2.tags
  160. assert len(DB.posts.all()) == 22
  161. assert e1.tags[0].posts == e2.tags[0].posts
  162. e1.render()
  163. [t.render() for t in e1.tags]
  164. l = _sort_entries([e2, e1])
  165. assert l == [e2, e1]
  166. assert len(DB.posts.all()) == 22
  167. def test_tag_render():
  168. p = DB.posts.get(eid=1)
  169. entry = Entry.entry_from_db(
  170. os.path.join(CONFIG['content_root'], p.get('filename')), 1)
  171. #entry = Entry(os.path.join(CONFIG['content_root'], 'post1.md'))
  172. tags = entry.tags
  173. assert list(map(str, tags)) == ['buf', 'foo', 'bar', 'baz']
  174. # the entries are wrongly sorted, need to look at that
  175. assert tags[0].render()
  176. assert len(list(tags[0].entries))
  177. assert len(DB.posts.all()) == 22
  178. def test_get_last_entries():
  179. assert len(DB.posts.all()) == 22
  180. le = _get_last_entries(DB, 10)
  181. assert [e.id for e in le] == list(range(22, 12, -1))
  182. def test_render_archive():
  183. entries = [Entry.entry_from_db(
  184. os.path.join(CONFIG['content_root'], e.get('filename')), e.eid) for e in
  185. DB.posts.all()]
  186. render_archive(_sort_entries(entries, reversed=True)[ARCHIVE_SIZE:])
  187. # pages should not be in the archive
  188. with open(os.path.join(CONFIG['output_to'], 'archive', 'index.html')) as html_index:
  189. soup = BeautifulSoup(html_index.read(), 'html.parser')
  190. assert len(soup.find_all(class_='post')) == 12
  191. def test_render_index():
  192. update_index(_get_last_entries(DB, 10))
  193. with open(os.path.join(CONFIG['output_to'], 'index.html')) as html_index:
  194. soup = BeautifulSoup(html_index.read(), 'html.parser')
  195. assert len(soup.find_all(class_='clearfix entry')) == 10
def test_build():
    """Full site build from a purged DB: checks index, archive, and a tag page."""
    # Start from an empty database; build() re-scans the content tree.
    DB._db.purge_tables()
    build(CONFIG)
    # check that the index really contains the last 10 entries
    with open(os.path.join(CONFIG['output_to'], 'index.html')) as html_index:
        soup = BeautifulSoup(html_index.read(), 'html.parser')
        assert len(soup.find_all(class_='clearfix entry')) == 10
    # pages should not be in the archive
    with open(os.path.join(CONFIG['output_to'], 'archive', 'index.html')) as html_index:
        soup = BeautifulSoup(html_index.read(), 'html.parser')
        assert len(soup.find_all(class_='post')) == 12
    # The 'foo' tag page lists its posts newest-first; each title ends with
    # the post number, so titles should count down 15..1.
    with open(os.path.join(CONFIG['output_to'], 'tags', 'foo', 'index.html')) as tag_foo:
        soup = BeautifulSoup(tag_foo.read(), 'html.parser')
        titles = [c.a.string for c in
                  soup.find_all(class_="clearfix entry")]
        for title, idx in zip(titles, list(range(15, 0, -1))):
            assert title.split()[-1] == str(idx)