"""Integration tests for blogit.

These tests are order-dependent and share module-level state: importing this
module wipes/creates the test content root, writes 20 post fixtures plus one
page fixture, and monkey-patches blogit's module-level DB to a local TinyDB.
Run the whole file with pytest; individual tests assume the earlier ones ran.
"""
import os
import sys

import pytest
from bs4 import BeautifulSoup
from tinydb import where

sys.path.insert(0, os.getcwd())

from conf import CONFIG  # noqa

# Start from a clean slate: drop any database/content left by a previous run.
db_name = os.path.join(CONFIG['content_root'], 'blogit.db')
if os.path.exists(db_name):
    import shutil
    shutil.rmtree(CONFIG['content_root'])

if not os.path.exists(CONFIG['content_root']):
    os.mkdir(CONFIG['content_root'])

# Redirect all content generation into an isolated test directory.
CONFIG['content_root'] = 'test_root'
ARCHIVE_SIZE = 10

from blogit.blogit import (find_new_posts_and_pages, DataBase,  # noqa
                           Entry, Tag, _get_last_entries,
                           render_archive, update_index, build)
import blogit.blogit as m  # noqa

DB = DataBase(os.path.join(CONFIG['content_root'], 'blogit.db'))

# monkey patch to local DB
m.DB = DB
Tag.table = DB.tags
Tag.db = DB
Entry.db = DB

tags = ['foo', 'bar', 'baz', 'bug', 'buf']


def shift(l, n):
    """Return *l* rotated right by *n* positions (e.g. shift([1,2,3], 1) -> [3,1,2])."""
    return l[-n:] + l[:-n]


# Template for the 20 generated post fixtures; `number` and `tags` are filled
# in by write_file().  (Content, including the "summray" typo, is fixture data
# and must stay as-is.)
post = '''\
---
title: Blog post {number}
author: Famous author
published: 2015-01-{number}
tags: {tags}
public: yes
chronological: yes
kind: writing
summary: This is a summray of post {number}. Donec id elit non mi porta gravida at eget metus. Fusce dapibus
---
This is the body of post {number}. Donec id elit non mi porta gravida at eget metus. 
Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum
massa justo sit amet risus. Etiam porta sem malesuada magna mollis euismod. Donec sed odio dui.

This is a snippet in bash

```bash
$ for i in `seq 1 10`; do
   echo $i
done

VAR="variable"
echo $VAR
# This is a very long long long long long long long long long long comment
```

This is a snippet in python

```python
def yay(top):
    for i in range(1, top+1):
            yield i

for i in yay:
    print(i)
```
'''

try:
    os.mkdir(CONFIG['content_root'])
except OSError:
    pass

# Rotation amount per post: posts 1-5 -> 1, 6-10 -> 2, 11-15 -> 3, 16-20 -> 4.
shift_factors = [(x - 1) // 5 + 1 for x in range(1, 21)]

# One page fixture (pages must be excluded from index/archive rendering).
with open(os.path.join(CONFIG['content_root'], 'page.md'), 'w') as page_file:
    page_file.write("""\
---
title: example page
public: yes
kind: page
template: about.html
---
# some heading

content paragraph

## heading 2

some more content
""")


def write_file(i):
    """Write post fixture ``post{i:03d}.md`` with a rotated, truncated tag list."""
    file_name = os.path.join(CONFIG['content_root'],
                             'post{0:03d}.md'.format(i))
    # `with` guarantees the handle is closed (the original leaked it).
    with open(file_name, 'w') as post_file:
        post_file.write(post.format(
            **{'number': i,
               'tags': ','.join(shift(tags, shift_factors[i - 1])[:-1])}))


for i in range(1, 21):
    write_file(i)


def test_find_new_posts_and_pages():
    entries = [e for e in find_new_posts_and_pages(DB)]
    assert len(entries)
    pages = [e[1] for e in entries if str(e[0]).endswith('page.md')]
    assert len(pages)
    assert len(DB.posts.all()) == 20
    new_entries = [e for e in find_new_posts_and_pages(DB)]
    # no new posts should be found
    assert len(DB.posts.all()) == 20
    assert len(new_entries) == 0
    # accessing .tags has the side effect of populating the tags table
    [e[0].tags for e in entries]
    foo = DB.tags.search(where('name') == 'foo')
    assert foo[0]['post_ids'] == list(range(1, 16))


def test_tags():
    entries = [
        Entry.entry_from_db(os.path.join(CONFIG['content_root'],
                                         e.get('filename')), e.eid)
        for e in DB.posts.all()]

    tags = DB.tags.all()  # noqa

    t = entries[0].tags

    assert len(t) == 4
    assert t[0].name == 'buf'

    new_tag = Tag('buggg')
    new_tag.posts = [100, 100]

    # Tag.posts only accepts a list of ids.
    with pytest.raises(ValueError):
        new_tag.posts = "This should not work"
    with pytest.raises(ValueError):
        new_tag.posts = 1  # This should not either

    # post id 100 does not exist, so resolving entries must fail
    new_tag.posts = [100]
    with pytest.raises(ValueError):
        list(new_tag.entries)


def test_slug():
    t = Tag('foo:bar')
    assert t.slug == "foo-bar"
    t = Tag('foo:;bar,.,baz')
    assert t.slug == "foo-bar-baz"


# NOTE(review): disabled tests kept for reference; re-enable once Tag post_ids
# accumulation is stable.
"""
def test_tag_posts():
    example = Tag('example')
    example.posts = [1,2,3]
    assert [1,2,3] == example.posts
    Filter = Query()
    t = DB.tags.get(Filter.post_ids == [1, 2, 3])
    assert t['post_ids'] == [1, 2, 3]

    example = Tag('example')
    example.posts = [4,5,6]
    rv = DB.tags.search(where('name') == 'example')
    assert rv[0]['post_ids'] == range(1, 7)


def test_tag_entries():
    t = Tag('breaks')
    t.posts = [10000]
    with pytest.raises(ValueError):
        list(t.entries)
    tf = Tag(u'example')
    entries = list(tf.entries)
    assert len(entries)
"""


def test_tag_post_ids():
    # local template renamed from `m` so it no longer shadows the module alias
    post_template = """\
---
title: Blog post {}
author: Famous author
published: 2015-01-{}
tags: tag1, tag2
public: yes
chronological: yes
kind: writing
summary: This is a summary
---
"""
    assert len(DB.posts.all()) == 20
    with open(os.path.join(CONFIG['content_root'], 'e.md'), 'w') as f:
        f.write(post_template.format(25, 25))
    with open(os.path.join(CONFIG['content_root'], 'f.md'), 'w') as f:
        f.write(post_template.format(27, 27))

    e1 = Entry(os.path.join(CONFIG['content_root'], 'e.md'))
    e1.tags
    e2 = Entry(os.path.join(CONFIG['content_root'], 'f.md'))
    e2.tags
    assert len(DB.posts.all()) == 22
    #assert e1.tags[0].posts == e2.tags[0].posts
    e1.render()
    [t.render() for t in e1.tags]
    assert len(DB.posts.all()) == 22


def test_tag_render():
    p = DB.posts.get(eid=1)
    entry = Entry.entry_from_db(
        os.path.join(CONFIG['content_root'], p.get('filename')), 1)

    tags = entry.tags
    assert list(map(str, tags)) == ['buf', 'foo', 'bar', 'baz']
    # TODO: the entries are wrongly sorted, need to look at that
    assert tags[0].render()
    assert len(list(tags[0].entries))
    assert len(DB.posts.all()) == 22


def test_get_last_entries():
    assert len(DB.posts.all()) == 22
    # second return value renamed so it no longer shadows builtin all()
    le, _all = _get_last_entries(DB, 10)
    assert [e.id for e in le] == list(range(22, 12, -1))


def test_render_archive():
    entries = [Entry.entry_from_db(
        os.path.join(CONFIG['content_root'], e.get('filename')), e.eid)
        for e in DB.posts.all()]
    render_archive(entries[ARCHIVE_SIZE:])
    # pages should not be in the archive
    with open(os.path.join(CONFIG['output_to'], 'archive',
                           'index.html')) as html_index:
        soup = BeautifulSoup(html_index.read(), 'html.parser')
        assert len(soup.find_all(class_='post')) == 12


def test_render_index():
    le, all_entries = _get_last_entries(DB, 10)
    update_index(le)
    with open(os.path.join(CONFIG['output_to'], 'index.html')) as html_index:
        soup = BeautifulSoup(html_index.read(), 'html.parser')
        assert len(soup.find_all(class_='clearfix entry')) == 10


def test_build():
    # rebuild everything from scratch out of the markdown fixtures
    DB._db.purge_tables()
    build(CONFIG)
    # check that the index really contains the last 10 entries
    with open(os.path.join(CONFIG['output_to'], 'index.html')) as html_index:
        soup = BeautifulSoup(html_index.read(), 'html.parser')
        assert len(soup.find_all(class_='clearfix entry')) == 10

    # pages should not be in the archive
    with open(os.path.join(CONFIG['output_to'], 'archive',
                           'index.html')) as html_index:
        soup = BeautifulSoup(html_index.read(), 'html.parser')
        assert len(soup.find_all(class_='post')) == 12

    # entries tagged 'foo' are posts 1..15, rendered newest first
    with open(os.path.join(CONFIG['output_to'], 'tags', 'foo',
                           'index.html')) as tag_foo:
        soup = BeautifulSoup(tag_foo.read(), 'html.parser')
        titles = [c.a.string for c in
                  soup.find_all(class_="clearfix entry")]
        for title, idx in zip(titles, list(range(15, 0, -1))):
            assert title.split()[-1] == str(idx)