Merge branch 'main' into production

Mouse Reeve 2020-10-30 11:56:05 -07:00
commit 0a34cf8821
23 changed files with 386 additions and 125 deletions

View file

@@ -64,14 +64,14 @@ def load_more_data(book_id):
connector.expand_book_data(book)
def search(query):
def search(query, min_confidence=0.1):
''' find books based on arbitrary keywords '''
results = []
dedup_slug = lambda r: '%s/%s/%s' % (r.title, r.author, r.year)
result_index = set()
for connector in get_connectors():
try:
result_set = connector.search(query)
result_set = connector.search(query, min_confidence=min_confidence)
except HTTPError:
continue
@@ -87,16 +87,16 @@ def search(query):
return results
def local_search(query):
def local_search(query, min_confidence=0.1):
''' only look at local search results '''
connector = load_connector(models.Connector.objects.get(local=True))
return connector.search(query)
return connector.search(query, min_confidence=min_confidence)
def first_search_result(query):
def first_search_result(query, min_confidence=0.1):
''' search until you find a result that fits '''
for connector in get_connectors():
result = connector.search(query)
result = connector.search(query, min_confidence=min_confidence)
if result:
return result[0]
return None
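A rough sketch of how the new min_confidence parameter is meant to be called, assuming a configured Django shell for this project (the query text is invented):

    from bookwyrm import books_manager

    # min_confidence is passed through to every connector; the self connector
    # filters on its Postgres search rank, while the shared AbstractConnector
    # search accepts the keyword but does not yet appear to use it
    results = books_manager.search('The Fifth Season', min_confidence=0.1)
    best = books_manager.first_search_result('The Fifth Season', min_confidence=0.992)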

View file

@@ -1,15 +1,17 @@
''' functionality outline for a book data connector '''
from abc import ABC, abstractmethod
from dataclasses import dataclass
from dateutil import parser
import pytz
import requests
from requests import HTTPError
from django.db import transaction
from bookwyrm import models
class ConnectorException(Exception):
class ConnectorException(HTTPError):
''' when the connector can't do what was asked '''
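Subclassing HTTPError matters because callers already catch that exception; a minimal standalone sketch (the class below is a stand-in mirroring the definition above, not an import from the project):

    from requests import HTTPError

    class ConnectorException(HTTPError):
        ''' stand-in mirroring the new definition above '''

    try:
        raise ConnectorException('connector could not complete the request')
    except HTTPError:
        # books_manager.search() already does `except HTTPError: continue`,
        # so a failing connector is skipped instead of crashing the search
        pass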
@@ -50,7 +52,7 @@ class AbstractConnector(ABC):
return True
def search(self, query):
def search(self, query, min_confidence=None):
''' free text search '''
resp = requests.get(
'%s%s' % (self.search_url, query),
@@ -155,9 +157,11 @@ class AbstractConnector(ABC):
''' for creating a new book or syncing with data '''
book = update_from_mappings(book, data, self.book_mappings)
author_text = []
for author in self.get_authors_from_data(data):
book.authors.add(author)
book.author_text = ', '.join(a.display_name for a in book.authors.all())
author_text.append(author.display_name)
book.author_text = ', '.join(author_text)
book.save()
if not update_cover:
@@ -287,25 +291,29 @@ def get_date(date_string):
def get_data(url):
''' wrapper for requests.get '''
resp = requests.get(
url,
headers={
'Accept': 'application/json; charset=utf-8',
},
)
try:
resp = requests.get(
url,
headers={
'Accept': 'application/json; charset=utf-8',
},
)
except ConnectionError:
raise ConnectorException()
if not resp.ok:
resp.raise_for_status()
data = resp.json()
return data
@dataclass
class SearchResult:
''' standardized search result object '''
def __init__(self, title, key, author, year):
self.title = title
self.key = key
self.author = author
self.year = year
title: str
key: str
author: str
year: str
confidence: int = 1
def __repr__(self):
return "<SearchResult key={!r} title={!r} author={!r}>".format(

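Turning SearchResult into a dataclass keeps keyword construction but adds a default confidence; a small self-contained sketch of the new shape (the field values are invented):

    from dataclasses import dataclass

    @dataclass
    class SearchResult:
        ''' mirrors the new definition above '''
        title: str
        key: str
        author: str
        year: str
        confidence: int = 1

    result = SearchResult(
        title='Example Work',
        key='https://example.com/book/1',
        author='Anonymous',
        year='2020',
    )
    # confidence defaults to 1; the self connector overrides it with its
    # Postgres search rank, which is what lets the importer demand a
    # near-exact match (min_confidence=0.992) further down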
View file

@@ -52,11 +52,13 @@ class Connector(AbstractConnector):
def get_edition_from_work_data(self, data):
return data['editions'][0]
''' we're served a list of edition urls '''
path = data['editions'][0]
return get_data(path)
def get_work_from_edition_date(self, data):
return data['work']
return get_data(data['work'])
def get_authors_from_data(self, data):

View file

@@ -129,10 +129,10 @@ class Connector(AbstractConnector):
key = self.books_url + search_result['key']
author = search_result.get('author_name') or ['Unknown']
return SearchResult(
search_result.get('title'),
key,
', '.join(author),
search_result.get('first_publish_year'),
title=search_result.get('title'),
key=key,
author=', '.join(author),
year=search_result.get('first_publish_year'),
)

View file

@@ -7,7 +7,7 @@ from .abstract_connector import AbstractConnector, SearchResult
class Connector(AbstractConnector):
''' instantiate a connector '''
def search(self, query):
def search(self, query, min_confidence=0.1):
''' right now you can't search bookwyrm sorry, but when
that gets implemented it will totally rule '''
vector = SearchVector('title', weight='A') +\
@@ -28,7 +28,7 @@ class Connector(AbstractConnector):
).annotate(
rank=SearchRank(vector, query)
).filter(
rank__gt=0
rank__gt=min_confidence
).order_by('-rank')
results = results.filter(default=True) or results
@@ -42,11 +42,12 @@ class Connector(AbstractConnector):
def format_search_result(self, search_result):
return SearchResult(
search_result.title,
search_result.local_id,
search_result.author_text,
search_result.published_date.year if \
title=search_result.title,
key=search_result.local_id,
author=search_result.author_text,
year=search_result.published_date.year if \
search_result.published_date else None,
confidence=search_result.rank,
)

View file

@@ -29,6 +29,7 @@ class CustomForm(ModelForm):
visible.field.widget.attrs['rows'] = None
visible.field.widget.attrs['class'] = css_classes[input_type]
class LoginForm(CustomForm):
class Meta:
model = models.User

View file

@@ -11,9 +11,13 @@ from bookwyrm.status import create_notification
MAX_ENTRIES = 500
def create_job(user, csv_file):
def create_job(user, csv_file, include_reviews, privacy):
''' check over a csv and create a database entry for the job '''
job = ImportJob.objects.create(user=user)
job = ImportJob.objects.create(
user=user,
include_reviews=include_reviews,
privacy=privacy
)
for index, entry in enumerate(list(csv.DictReader(csv_file))[:MAX_ENTRIES]):
if not all(x in entry for x in ('ISBN13', 'Title', 'Author')):
raise ValueError("Author, title, and isbn must be in data.")
@@ -42,13 +46,12 @@ def import_data(job_id):
if item.book:
item.save()
results.append(item)
else:
item.fail_reason = "Could not match book on OpenLibrary"
item.save()
status = outgoing.handle_import_books(job.user, results)
if status:
job.import_status = status
job.save()
if job.include_reviews:
# shelves book and handles reviews
outgoing.handle_imported_book(job.user, item, job.privacy)
else:
item.fail_reason = "Could not find a match for book"
item.save()
finally:
create_notification(job.user, 'IMPORT', related_import=job)

View file

@@ -0,0 +1,17 @@
# Generated by Django 3.0.7 on 2020-10-29 23:48
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('bookwyrm', '0057_auto_20201026_2131'),
]
operations = [
migrations.RemoveField(
model_name='importjob',
name='import_status',
),
]

View file

@@ -0,0 +1,23 @@
# Generated by Django 3.0.7 on 2020-10-30 17:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('bookwyrm', '0058_remove_importjob_import_status'),
]
operations = [
migrations.AddField(
model_name='importjob',
name='include_reviews',
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='importjob',
name='privacy',
field=models.CharField(choices=[('public', 'Public'), ('unlisted', 'Unlisted'), ('followers', 'Followers'), ('direct', 'Direct')], default='public', max_length=255),
),
]

View file

@@ -14,6 +14,14 @@ from django.dispatch import receiver
from bookwyrm import activitypub
from bookwyrm.settings import DOMAIN
PrivacyLevels = models.TextChoices('Privacy', [
'public',
'unlisted',
'followers',
'direct'
])
class BookWyrmModel(models.Model):
''' shared fields '''
created_date = models.DateTimeField(auto_now_add=True)
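The functional TextChoices call expands to exactly the pairs recorded in the privacy migration above; a small sketch (best run in the project's Django shell):

    from django.db import models

    PrivacyLevels = models.TextChoices('Privacy', [
        'public', 'unlisted', 'followers', 'direct'
    ])
    # PrivacyLevels.choices
    # -> [('public', 'Public'), ('unlisted', 'Unlisted'),
    #     ('followers', 'Followers'), ('direct', 'Direct')]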

View file

@@ -135,7 +135,7 @@ class Work(Book):
@property
def editions_path(self):
''' it'd be nice to serialize the edition instead but, recursion '''
return self.remote_id + '/editions'
return [e.remote_id for e in self.edition_set.all()]
@property
@@ -174,6 +174,49 @@ class Edition(Book):
activity_serializer = activitypub.Edition
def save(self, *args, **kwargs):
''' calculate isbn 10/13 '''
if self.isbn_13 and self.isbn_13[:3] == '978' and not self.isbn_10:
self.isbn_10 = isbn_13_to_10(self.isbn_13)
if self.isbn_10 and not self.isbn_13:
self.isbn_13 = isbn_10_to_13(self.isbn_10)
super().save(*args, **kwargs)
def isbn_10_to_13(isbn_10):
''' convert an isbn 10 into an isbn 13 '''
# drop the last character of the isbn 10 number (the original checkdigit)
converted = isbn_10[:9]
# add "978" to the front
converted = '978' + converted
# add a check digit to the end
# multiply the odd digits by 1 and the even digits by 3 and sum them
checksum = sum(int(i) for i in converted[::2]) + \
sum(int(i) * 3 for i in converted[1::2])
# add the checksum mod 10 to the end
checkdigit = checksum % 10
if checkdigit != 0:
checkdigit = 10 - checkdigit
return converted + str(checkdigit)
def isbn_13_to_10(isbn_13):
''' convert isbn 13 to 10, if possible '''
if isbn_13[:3] != '978':
return None
# remove '978' and old checkdigit
converted = isbn_13[3:-1]
# calculate checkdigit
# multiply each digit by 10, 9, 8... successively and sum them
checksum = sum(int(d) * (10 - idx) for (idx, d) in enumerate(converted))
checkdigit = checksum % 11
checkdigit = 11 - checkdigit
if checkdigit == 10:
checkdigit = 'X'
return converted + str(checkdigit)
class Author(ActivitypubMixin, BookWyrmModel):
''' copy of an author from OL '''
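A worked example of the ISBN-13 check digit, following the same steps as isbn_10_to_13 above; it is self-contained, and the sample ISBN-10 is the one used in the new tests:

    # drop the old check digit and prepend '978'
    converted = '978' + '178816167X'[:9]            # '978178816167'
    # odd positions x1, even positions x3
    checksum = sum(int(i) for i in converted[::2]) + \
        sum(int(i) * 3 for i in converted[1::2])    # 44 + 75 = 119
    checkdigit = checksum % 10                      # 9
    if checkdigit != 0:
        checkdigit = 10 - checkdigit                # 1
    assert converted + str(checkdigit) == '9781788161671'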

View file

@@ -9,6 +9,8 @@ from bookwyrm import books_manager
from bookwyrm.connectors import ConnectorException
from bookwyrm.models import ReadThrough, User, Book
from bookwyrm.utils.fields import JSONField
from .base_model import PrivacyLevels
# Mapping goodreads -> bookwyrm shelf titles.
GOODREADS_SHELVES = {
@@ -40,8 +42,13 @@ class ImportJob(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
created_date = models.DateTimeField(default=timezone.now)
task_id = models.CharField(max_length=100, null=True)
import_status = models.ForeignKey(
'Status', null=True, on_delete=models.PROTECT)
include_reviews = models.BooleanField(default=True)
privacy = models.CharField(
max_length=255,
default='public',
choices=PrivacyLevels.choices
)
class ImportItem(models.Model):
''' a single line of a csv being imported '''
@@ -64,13 +71,17 @@ class ImportItem(models.Model):
def get_book_from_isbn(self):
''' search by isbn '''
search_result = books_manager.first_search_result(self.isbn)
search_result = books_manager.first_search_result(
self.isbn, min_confidence=0.992
)
if search_result:
try:
# don't crash the import when the connector fails
return books_manager.get_or_create_book(search_result.key)
except ConnectorException:
pass
return None
def get_book_from_title_author(self):
''' search by title and author '''
@@ -78,9 +89,16 @@ class ImportItem(models.Model):
self.data['Title'],
self.data['Author']
)
search_result = books_manager.first_search_result(search_term)
search_result = books_manager.first_search_result(
search_term, min_confidence=0.992
)
if search_result:
return books_manager.get_or_create_book(search_result.key)
try:
return books_manager.get_or_create_book(search_result.key)
except ConnectorException:
pass
return None
@property
def isbn(self):
@@ -92,6 +110,7 @@ class ImportItem(models.Model):
''' the goodreads shelf field '''
if self.data['Exclusive Shelf']:
return GOODREADS_SHELVES.get(self.data['Exclusive Shelf'])
return None
@property
def review(self):
@@ -108,12 +127,14 @@ class ImportItem(models.Model):
''' when the book was added to this dataset '''
if self.data['Date Added']:
return dateutil.parser.parse(self.data['Date Added'])
return None
@property
def date_read(self):
''' the date a book was completed '''
if self.data['Date Read']:
return dateutil.parser.parse(self.data['Date Read'])
return None
@property
def reads(self):
@@ -123,6 +144,7 @@ class ImportItem(models.Model):
return [ReadThrough(start_date=self.date_added)]
if self.date_read:
return [ReadThrough(
start_date=self.date_added,
finish_date=self.date_read,
)]
return []

View file

@@ -7,16 +7,9 @@ from model_utils.managers import InheritanceManager
from bookwyrm import activitypub
from .base_model import ActivitypubMixin, OrderedCollectionPageMixin
from .base_model import ActivityMapping, BookWyrmModel
from .base_model import ActivityMapping, BookWyrmModel, PrivacyLevels
PrivacyLevels = models.TextChoices('Privacy', [
'public',
'unlisted',
'followers',
'direct'
])
class Status(OrderedCollectionPageMixin, BookWyrmModel):
''' any post, like a reply to a review, etc '''
user = models.ForeignKey('User', on_delete=models.PROTECT)

View file

@@ -155,51 +155,50 @@ def handle_unshelve(user, book, shelf):
broadcast(user, activity)
def handle_import_books(user, items):
def handle_imported_book(user, item, privacy):
''' process a goodreads csv and then post about it '''
new_books = []
for item in items:
if item.shelf:
desired_shelf = models.Shelf.objects.get(
identifier=item.shelf,
user=user
)
if isinstance(item.book, models.Work):
item.book = item.book.default_edition
if not item.book:
continue
shelf_book, created = models.ShelfBook.objects.get_or_create(
book=item.book, shelf=desired_shelf, added_by=user)
if created:
new_books.append(item.book)
activity = shelf_book.to_add_activity(user)
broadcast(user, activity)
if isinstance(item.book, models.Work):
item.book = item.book.default_edition
if not item.book:
return
if item.rating or item.review:
review_title = 'Review of {!r} on Goodreads'.format(
item.book.title,
) if item.review else ''
if item.shelf:
desired_shelf = models.Shelf.objects.get(
identifier=item.shelf,
user=user
)
# shelve the book if it hasn't been shelved already
shelf_book, created = models.ShelfBook.objects.get_or_create(
book=item.book, shelf=desired_shelf, added_by=user)
if created:
broadcast(user, shelf_book.to_add_activity(user), privacy=privacy)
models.Review.objects.create(
user=user,
book=item.book,
name=review_title,
content=item.review,
rating=item.rating,
)
for read in item.reads:
read.book = item.book
read.user = user
read.save()
# only add new read-throughs if the item isn't already shelved
for read in item.reads:
read.book = item.book
read.user = user
read.save()
if new_books:
message = 'imported {} books'.format(len(new_books))
status = create_generated_note(user, message, mention_books=new_books)
status.save()
if item.rating or item.review:
review_title = 'Review of {!r} on Goodreads'.format(
item.book.title,
) if item.review else ''
broadcast(user, status.to_create_activity(user))
return status
return None
# we don't know the publication date of the review,
# but "now" is a bad guess
published_date_guess = item.date_read or item.date_added
review = models.Review.objects.create(
user=user,
book=item.book,
name=review_title,
content=item.review,
rating=item.rating,
published_date=published_date_guess,
privacy=privacy,
)
# we don't need to send out pure activities because non-bookwyrm
# instances don't need this data
broadcast(user, review.to_create_activity(user), privacy=privacy)
def handle_delete_status(user, status):

View file

@@ -55,6 +55,62 @@
{% endif %}
</div>
{% for readthrough in readthroughs %}
<div class="content block">
<input class="toggle-control" type="radio" name="show-edit-readthrough" id="show-readthrough-{{ readthrough.id }}" checked>
<div class="toggle-content hidden">
<dl>
<dt>Started reading:</dt>
<dd>{{ readthrough.start_date | naturalday }}</dd>
<dt>Finished reading:</dt>
<dd>{{ readthrough.finish_date | naturalday }}</dd>
</dl>
<div class="field is-grouped">
<label class="button is-small" for="edit-readthrough-{{ readthrough.id }}">
<span class="icon icon-pencil">
<span class="is-sr-only">Edit readthrough dates</span>
</span>
</label>
<form name="delete-readthrough-{{ readthrough.id }}" action="/delete-readthrough" method="POST">
{% csrf_token %}
<input type="hidden" name="id" value="{{ readthrough.id }}">
<button class="button is-small" type="submit">
<span class="icon icon-x">
<span class="is-sr-only">Delete this readthrough</span>
</span>
</button>
</form>
</div>
</div>
</div>
<div class="content block">
<input class="toggle-control" type="radio" name="show-edit-readthrough" id="edit-readthrough-{{ readthrough.id }}">
<div class="toggle-content hidden">
<form name="edit-readthrough" action="/edit-readthrough" method="post">
{% csrf_token %}
<input type="hidden" name="id" value="{{ readthrough.id }}">
<div class="field">
<label class="label" for="start_date">
Started reading
<input type="date" name="start_date" class="input" id="id_start_date-{{ readthrough.id }}" value="{{ readthrough.start_date | date:"Y-m-d" }}">
</label>
</div>
<div class="field">
<label class="label" for="finish_date">
Finished reading
<input type="date" name="finish_date" class="input" id="id_finish_date-{{ readthrough.id }}" value="{{ readthrough.finish_date | date:"Y-m-d" }}">
</label>
</div>
<div class="field is-grouped">
<button class="button is-small" type="submit">Save</button>
<label class="button is-small" for="show-readthrough-{{ readthrough.id }}">Cancel</label>
</div>
</form>
</div>
</div>
{% endfor %}
{% if request.user.is_authenticated %}
<div class="block">
{% include 'snippets/create_status.html' with book=book hide_cover=True %}

View file

@@ -5,8 +5,24 @@
<h2 class="title">Import Books from GoodReads</h2>
<form name="import" action="/import_data/" method="post" enctype="multipart/form-data">
{% csrf_token %}
{{ import_form.as_p }}
<button class="button" type="submit">Import</button>
<div class="field">
{{ import_form.as_p }}
</div>
<div class="field">
<label class="label" for="include_reviews"><input type="checkbox" name="include_reviews" checked> Include reviews</label>
</div>
<div class="field">
<label class="label" for="privacy">Privacy setting for imported reviews</label>
<div class="select">
<select name="privacy">
<option value="public" selected>Public</option>
<option value="unlisted">Unlisted</option>
<option value="followers">Followers only</option>
<option value="direct">Private</option>
</select>
</div>
</div>
<button class="button is-primary" type="submit">Import</button>
</form>
<p>
Imports are limited in size, and only the first {{ limit }} items will be imported.

View file

@@ -34,16 +34,6 @@ class BookWyrmConnector(TestCase):
self.assertEqual(self.connector.is_work_data(self.edition_data), False)
def test_get_edition_from_work_data(self):
edition = self.connector.get_edition_from_work_data(self.work_data)
self.assertEqual(edition['url'], 'https://example.com/book/122')
def test_get_work_from_edition_data(self):
work = self.connector.get_work_from_edition_date(self.edition_data)
self.assertEqual(work['url'], 'https://example.com/book/121')
def test_format_search_result(self):
datafile = pathlib.Path(__file__).parent.joinpath('../data/fr_search.json')
search_data = json.loads(datafile.read_bytes())

View file

@@ -48,7 +48,7 @@ class SelfConnector(TestCase):
def test_format_search_result(self):
result = self.connector.format_search_result(self.edition)
result = self.connector.search('Edition of Example')[0]
self.assertEqual(result.title, 'Edition of Example Work')
self.assertEqual(result.key, self.edition.remote_id)
self.assertEqual(result.author, 'Anonymous')

View file

@@ -2,6 +2,7 @@
from django.test import TestCase
from bookwyrm import models, settings
from bookwyrm.models.book import isbn_10_to_13, isbn_13_to_10
class Book(TestCase):
@@ -48,6 +49,16 @@ class Book(TestCase):
self.assertEqual(self.work.default_edition, self.second_edition)
def test_isbn_10_to_13(self):
isbn_10 = '178816167X'
isbn_13 = isbn_10_to_13(isbn_10)
self.assertEqual(isbn_13, '9781788161671')
def test_isbn_13_to_10(self):
isbn_13 = '9781788161671'
isbn_10 = isbn_13_to_10(isbn_13)
self.assertEqual(isbn_10, '178816167X')
class Shelf(TestCase):
def setUp(self):

View file

@@ -24,7 +24,7 @@ class ImportJob(TestCase):
'Number of Pages': 416,
'Year Published': 2019,
'Original Publication Year': 2019,
'Date Read': '2019/04/09',
'Date Read': '2019/04/12',
'Date Added': '2019/04/09',
'Bookshelves': '',
'Bookshelves with positions': '',
@@ -84,7 +84,7 @@ class ImportJob(TestCase):
def test_date_read(self):
''' converts to the local shelf typology '''
expected = datetime.datetime(2019, 4, 9, 0, 0)
expected = datetime.datetime(2019, 4, 12, 0, 0)
item = models.ImportItem.objects.get(index=2)
self.assertEqual(item.date_read, expected)
@@ -97,11 +97,9 @@ class ImportJob(TestCase):
self.assertEqual(actual.reads[0].finish_date, expected[0].finish_date)
def test_read_reads(self):
expected = [models.ReadThrough(
finish_date=datetime.datetime(2019, 4, 9, 0, 0))]
actual = models.ImportItem.objects.get(index=2)
self.assertEqual(actual.reads[0].start_date, expected[0].start_date)
self.assertEqual(actual.reads[0].finish_date, expected[0].finish_date)
self.assertEqual(actual.reads[0].start_date, datetime.datetime(2019, 4, 9, 0, 0))
self.assertEqual(actual.reads[0].finish_date, datetime.datetime(2019, 4, 12, 0, 0))
def test_unread_reads(self):
expected = []

View file

@@ -102,6 +102,9 @@ urlpatterns = [
re_path(r'^edit_book/(?P<book_id>\d+)/?', actions.edit_book),
re_path(r'^upload_cover/(?P<book_id>\d+)/?', actions.upload_cover),
re_path(r'^edit-readthrough/?', actions.edit_readthrough),
re_path(r'^delete-readthrough/?', actions.delete_readthrough),
re_path(r'^rate/?$', actions.rate),
re_path(r'^review/?$', actions.review),
re_path(r'^quotate/?$', actions.quotate),

View file

@@ -2,6 +2,8 @@
from io import BytesIO, TextIOWrapper
from PIL import Image
import dateutil.parser
from dateutil.parser import ParserError
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required, permission_required
from django.core.files.base import ContentFile
@@ -261,6 +263,51 @@ def upload_cover(request, book_id):
return redirect('/book/%s' % book.id)
@login_required
def edit_readthrough(request):
''' can't use the form because the dates are too finicky '''
try:
readthrough = models.ReadThrough.objects.get(id=request.POST.get('id'))
except models.ReadThrough.DoesNotExist:
return HttpResponseNotFound()
# don't let people edit other people's data
if request.user != readthrough.user:
return HttpResponseBadRequest()
# convert dates into a legible format
start_date = request.POST.get('start_date')
try:
start_date = dateutil.parser.parse(start_date)
except ParserError:
start_date = None
readthrough.start_date = start_date
finish_date = request.POST.get('finish_date')
try:
finish_date = dateutil.parser.parse(finish_date)
except ParserError:
finish_date = None
readthrough.finish_date = finish_date
readthrough.save()
return redirect(request.headers.get('Referer', '/'))
@login_required
def delete_readthrough(request):
''' remove a readthrough '''
try:
readthrough = models.ReadThrough.objects.get(id=request.POST.get('id'))
except models.ReadThrough.DoesNotExist:
return HttpResponseNotFound()
# don't let people edit other people's data
if request.user != readthrough.user:
return HttpResponseBadRequest()
readthrough.delete()
return redirect(request.headers.get('Referer', '/'))
@login_required
def shelve(request):
''' put a book on a user's shelf '''
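The readthrough views lean on dateutil's ParserError instead of form validation; a tiny standalone sketch of that pattern (the sample strings are invented):

    import dateutil.parser
    from dateutil.parser import ParserError

    def parse_or_none(value):
        ''' mirror the views above: swallow unparseable dates '''
        try:
            return dateutil.parser.parse(value)
        except ParserError:
            return None

    print(parse_or_none('2020-10-30'))  # 2020-10-30 00:00:00
    print(parse_or_none('not a date'))  # None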
@@ -491,12 +538,16 @@ def import_data(request):
''' ingest a goodreads csv '''
form = forms.ImportForm(request.POST, request.FILES)
if form.is_valid():
include_reviews = request.POST.get('include_reviews') == 'on'
privacy = request.POST.get('privacy')
try:
job = goodreads_import.create_job(
request.user,
TextIOWrapper(
request.FILES['csv_file'],
encoding=request.encoding)
encoding=request.encoding),
include_reviews,
privacy,
)
except (UnicodeDecodeError, ValueError):
return HttpResponseBadRequest('Not a valid csv file')

View file

@@ -112,26 +112,34 @@ def home_tab(request, tab):
def get_activity_feed(user, filter_level, model=models.Status):
''' get a filtered queryset of statuses '''
# status updates for your follow network
following = models.User.objects.filter(
Q(followers=user) | Q(id=user.id)
)
if user.is_anonymous:
user = None
if user:
following = models.User.objects.filter(
Q(followers=user) | Q(id=user.id)
)
else:
following = []
activities = model
if hasattr(model, 'objects'):
activities = model.objects.filter(deleted=False)
activities = model.objects
activities = activities.order_by(
'-created_date'
activities = activities.filter(
deleted=False
).order_by(
'-published_date'
)
if hasattr(activities, 'select_subclasses'):
activities = activities.select_subclasses()
# TODO: privacy relationship between request.user and user
if filter_level in ['friends', 'home']:
# people you follow and direct mentions
activities = activities.filter(
Q(user__in=following, privacy__in=['public', 'unlisted', 'followers']) | \
Q(mention_users=user) | Q(user=user)
Q(user__in=following, privacy__in=[
'public', 'unlisted', 'followers'
]) | Q(mention_users=user) | Q(user=user)
)
elif filter_level == 'self':
activities = activities.filter(user=user, privacy='public')
@@ -470,14 +478,21 @@ def book_page(request, book_id):
reviews = models.Review.objects.filter(
book__in=work.edition_set.all(),
).order_by('-published_date')
)
reviews = get_activity_feed(request.user, 'federated', model=reviews)
user_tags = []
readthroughs = []
if request.user.is_authenticated:
user_tags = models.Tag.objects.filter(
book=book, user=request.user
).values_list('identifier', flat=True)
readthroughs = models.ReadThrough.objects.filter(
user=request.user,
book=book,
).order_by('start_date')
rating = reviews.aggregate(Avg('rating'))
tags = models.Tag.objects.filter(
book=book
@@ -495,6 +510,7 @@ def book_page(request, book_id):
'review_form': forms.ReviewForm(),
'quotation_form': forms.QuotationForm(),
'comment_form': forms.CommentForm(),
'readthroughs': readthroughs,
'tag_form': forms.TagForm(),
'path': '/book/%s' % book_id,
'cover_form': forms.CoverForm(instance=book),