Merge branch 'main' into production

Mouse Reeve 2021-01-03 08:18:57 -08:00
commit 2635d109ed
28 changed files with 742 additions and 305 deletions

View file

@ -2,3 +2,5 @@
from .settings import CONNECTORS
from .abstract_connector import ConnectorException
from .abstract_connector import get_data, get_image
from .connector_manager import search, local_search, first_search_result

View file

@ -6,17 +6,13 @@ from urllib3.exceptions import RequestError
from django.db import transaction
import requests
from requests import HTTPError
from requests.exceptions import SSLError
from bookwyrm import activitypub, models, settings
from .connector_manager import load_more_data, ConnectorException
logger = logging.getLogger(__name__)
class ConnectorException(HTTPError):
''' when the connector can't do what was asked '''
class AbstractMinimalConnector(ABC):
''' just the bare bones, for other bookwyrm instances '''
def __init__(self, identifier):
@ -90,7 +86,6 @@ class AbstractConnector(AbstractMinimalConnector):
return True
@transaction.atomic
def get_or_create_book(self, remote_id):
''' translate arbitrary json into an Activitypub dataclass '''
# first, check if we have the origin_id saved
@ -123,13 +118,17 @@ class AbstractConnector(AbstractMinimalConnector):
if not work_data or not edition_data:
raise ConnectorException('Unable to load book data: %s' % remote_id)
# create activitypub object
work_activity = activitypub.Work(**work_data)
# this will dedupe automatically
work = work_activity.to_model(models.Work)
for author in self.get_authors_from_data(data):
work.authors.add(author)
return self.create_edition_from_data(work, edition_data)
with transaction.atomic():
# create activitypub object
work_activity = activitypub.Work(**work_data)
# this will dedupe automatically
work = work_activity.to_model(models.Work)
for author in self.get_authors_from_data(data):
work.authors.add(author)
edition = self.create_edition_from_data(work, edition_data)
load_more_data.delay(self.connector.id, work.id)
return edition
def create_edition_from_data(self, work, edition_data):
@ -206,7 +205,7 @@ def get_data(url):
'User-Agent': settings.USER_AGENT,
},
)
except RequestError:
except (RequestError, SSLError):
raise ConnectorException()
if not resp.ok:
resp.raise_for_status()

View file

@ -1,51 +1,15 @@
''' select and call a connector for whatever book task needs doing '''
''' interface with whatever connectors the app has '''
import importlib
from urllib.parse import urlparse
from requests import HTTPError
from bookwyrm import models
from bookwyrm.connectors import ConnectorException
from bookwyrm.tasks import app
def get_edition(book_id):
''' look up a book in the db and return an edition '''
book = models.Book.objects.select_subclasses().get(id=book_id)
if isinstance(book, models.Work):
book = book.default_edition
return book
def get_or_create_connector(remote_id):
''' get the connector related to the author's server '''
url = urlparse(remote_id)
identifier = url.netloc
if not identifier:
raise ValueError('Invalid remote id')
try:
connector_info = models.Connector.objects.get(identifier=identifier)
except models.Connector.DoesNotExist:
connector_info = models.Connector.objects.create(
identifier=identifier,
connector_file='bookwyrm_connector',
base_url='https://%s' % identifier,
books_url='https://%s/book' % identifier,
covers_url='https://%s/images/covers' % identifier,
search_url='https://%s/search?q=' % identifier,
priority=2
)
return load_connector(connector_info)
@app.task
def load_more_data(book_id):
''' background the work of getting all 10,000 editions of LoTR '''
book = models.Book.objects.select_subclasses().get(id=book_id)
connector = load_connector(book.connector)
connector.expand_book_data(book)
class ConnectorException(HTTPError):
''' when the connector can't do what was asked '''
def search(query, min_confidence=0.1):
@ -92,6 +56,38 @@ def get_connectors():
yield load_connector(info)
def get_or_create_connector(remote_id):
''' get the connector related to the author's server '''
url = urlparse(remote_id)
identifier = url.netloc
if not identifier:
raise ValueError('Invalid remote id')
try:
connector_info = models.Connector.objects.get(identifier=identifier)
except models.Connector.DoesNotExist:
connector_info = models.Connector.objects.create(
identifier=identifier,
connector_file='bookwyrm_connector',
base_url='https://%s' % identifier,
books_url='https://%s/book' % identifier,
covers_url='https://%s/images/covers' % identifier,
search_url='https://%s/search?q=' % identifier,
priority=2
)
return load_connector(connector_info)
@app.task
def load_more_data(connector_id, book_id):
''' background the work of getting all 10,000 editions of LoTR '''
connector_info = models.Connector.objects.get(id=connector_id)
connector = load_connector(connector_info)
book = models.Book.objects.select_subclasses().get(id=book_id)
connector.expand_book_data(book)
def load_connector(connector_info):
''' instantiate the connector class '''
connector = importlib.import_module(
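A minimal sketch of queueing the reworked task, which now takes the connector id explicitly instead of reading it off the book; the ids below are placeholders:

from bookwyrm.connectors.connector_manager import load_more_data

# fetch the remaining editions of book 5 in the background, using connector 1
load_more_data.delay(1, 5)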

View file

@ -3,7 +3,8 @@ import re
from bookwyrm import models
from .abstract_connector import AbstractConnector, SearchResult, Mapping
from .abstract_connector import ConnectorException, get_data
from .abstract_connector import get_data
from .connector_manager import ConnectorException
from .openlibrary_languages import languages

View file

@ -1,6 +1,9 @@
''' using a bookwyrm instance as a source of book data '''
from functools import reduce
import operator
from django.contrib.postgres.search import SearchRank, SearchVector
from django.db.models import F
from django.db.models import Count, F, Q
from bookwyrm import models
from .abstract_connector import AbstractConnector, SearchResult
@ -9,38 +12,18 @@ from .abstract_connector import AbstractConnector, SearchResult
class Connector(AbstractConnector):
''' instantiate a connector '''
def search(self, query, min_confidence=0.1):
''' right now you can't search bookwyrm sorry, but when
that gets implemented it will totally rule '''
vector = SearchVector('title', weight='A') +\
SearchVector('subtitle', weight='B') +\
SearchVector('authors__name', weight='C') +\
SearchVector('isbn_13', weight='A') +\
SearchVector('isbn_10', weight='A') +\
SearchVector('openlibrary_key', weight='C') +\
SearchVector('goodreads_key', weight='C') +\
SearchVector('asin', weight='C') +\
SearchVector('oclc_number', weight='C') +\
SearchVector('remote_id', weight='C') +\
SearchVector('description', weight='D') +\
SearchVector('series', weight='D')
results = models.Edition.objects.annotate(
search=vector
).annotate(
rank=SearchRank(vector, query)
).filter(
rank__gt=min_confidence
).order_by('-rank')
# remove non-default editions, if possible
results = results.filter(parent_work__default_edition__id=F('id')) \
or results
''' search your local database '''
# first, try searching unique identifiers
results = search_identifiers(query)
if not results:
# then try searching title/author
results = search_title_author(query, min_confidence)
search_results = []
for book in results[:10]:
search_results.append(
self.format_search_result(book)
)
for result in results:
search_results.append(self.format_search_result(result))
if len(search_results) >= 10:
break
search_results.sort(key=lambda r: r.confidence, reverse=True)
return search_results
@ -52,7 +35,8 @@ class Connector(AbstractConnector):
year=search_result.published_date.year if \
search_result.published_date else None,
connector=self,
confidence=search_result.rank,
confidence=search_result.rank if \
hasattr(search_result, 'rank') else 1,
)
@ -74,3 +58,50 @@ class Connector(AbstractConnector):
def expand_book_data(self, book):
pass
def search_identifiers(query):
''' tries remote_id, isbn; defined as dedupe fields on the model '''
filters = [{f.name: query} for f in models.Edition._meta.get_fields() \
if hasattr(f, 'deduplication_field') and f.deduplication_field]
results = models.Edition.objects.filter(
reduce(operator.or_, (Q(**f) for f in filters))
).distinct()
# when there are multiple editions of the same work, pick the default.
# it would be odd for this to happen.
return results.filter(parent_work__default_edition__id=F('id')) \
or results
def search_title_author(query, min_confidence):
''' searches for title and author '''
vector = SearchVector('title', weight='A') +\
SearchVector('subtitle', weight='B') +\
SearchVector('authors__name', weight='C') +\
SearchVector('series', weight='D')
results = models.Edition.objects.annotate(
search=vector
).annotate(
rank=SearchRank(vector, query)
).filter(
rank__gt=min_confidence
).order_by('-rank')
# when there are multiple editions of the same work, pick the closest
editions_of_work = results.values(
'parent_work'
).annotate(
Count('parent_work')
).values_list('parent_work')
for work_id in set(editions_of_work):
editions = results.filter(parent_work=work_id)
default = editions.filter(parent_work__default_edition=F('id'))
default_rank = default.first().rank if default.exists() else 0
# if multiple books have the top rank, pick the default edition
if default_rank == editions.first().rank:
yield default.first()
else:
yield editions.first()
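A minimal sketch of exercising the two-stage search from a Django shell, assuming the local Connector entry exists (the test setup further down creates exactly that); the query strings are illustrative:

from bookwyrm.connectors.self_connector import Connector
from bookwyrm.settings import DOMAIN

connector = Connector(DOMAIN)
# an exact identifier (isbn, remote_id, ...) is tried first via search_identifiers
results = connector.search('9781250313195')
# anything else falls back to the weighted title/subtitle/author/series ranking
results = connector.search('gideon the ninth', min_confidence=0.1)
for result in results:
    print(result.title, result.confidence)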

View file

@ -8,8 +8,6 @@ from bookwyrm.models import ImportJob, ImportItem
from bookwyrm.status import create_notification
logger = logging.getLogger(__name__)
# TODO: remove or increase once we're confident it's not causing problems.
MAX_ENTRIES = 500
def create_job(user, csv_file, include_reviews, privacy):
@ -19,12 +17,13 @@ def create_job(user, csv_file, include_reviews, privacy):
include_reviews=include_reviews,
privacy=privacy
)
for index, entry in enumerate(list(csv.DictReader(csv_file))[:MAX_ENTRIES]):
for index, entry in enumerate(list(csv.DictReader(csv_file))):
if not all(x in entry for x in ('ISBN13', 'Title', 'Author')):
raise ValueError('Author, title, and isbn must be in data.')
ImportItem(job=job, index=index, data=entry).save()
return job
def create_retry_job(user, original_job, items):
''' retry items that didn't import '''
job = ImportJob.objects.create(
@ -37,6 +36,7 @@ def create_retry_job(user, original_job, items):
ImportItem(job=job, index=item.index, data=item.data).save()
return job
def start_import(job):
''' initializes a csv import job '''
result = import_data.delay(job.id)
@ -49,7 +49,6 @@ def import_data(job_id):
''' does the actual lookup work in a celery task '''
job = ImportJob.objects.get(id=job_id)
try:
results = []
for item in job.items.all():
try:
item.resolve()
@ -61,7 +60,6 @@ def import_data(job_id):
if item.book:
item.save()
results.append(item)
# shelves book and handles reviews
outgoing.handle_imported_book(

View file

@ -9,8 +9,11 @@ from .connector import Connector
from .shelf import Shelf, ShelfBook
from .status import Status, GeneratedNote, Review, Comment, Quotation
from .status import Favorite, Boost, Notification, ReadThrough
from .status import Boost
from .attachment import Image
from .favorite import Favorite
from .notification import Notification
from .readthrough import ReadThrough
from .tag import Tag, UserTag

View file

@ -0,0 +1,26 @@
''' like/fav/star a status '''
from django.db import models
from django.utils import timezone
from bookwyrm import activitypub
from .base_model import ActivitypubMixin, BookWyrmModel
from . import fields
class Favorite(ActivitypubMixin, BookWyrmModel):
''' fav'ing a post '''
user = fields.ForeignKey(
'User', on_delete=models.PROTECT, activitypub_field='actor')
status = fields.ForeignKey(
'Status', on_delete=models.PROTECT, activitypub_field='object')
activity_serializer = activitypub.Like
def save(self, *args, **kwargs):
''' update user active time '''
self.user.last_active_date = timezone.now()
self.user.save()
super().save(*args, **kwargs)
class Meta:
''' can't fav things twice '''
unique_together = ('user', 'status')

View file

@ -6,7 +6,7 @@ from django.contrib.postgres.fields import JSONField
from django.db import models
from django.utils import timezone
from bookwyrm import books_manager
from bookwyrm.connectors import connector_manager
from bookwyrm.models import ReadThrough, User, Book
from .fields import PrivacyLevels
@ -71,7 +71,7 @@ class ImportItem(models.Model):
def get_book_from_isbn(self):
''' search by isbn '''
search_result = books_manager.first_search_result(
search_result = connector_manager.first_search_result(
self.isbn, min_confidence=0.999
)
if search_result:
@ -86,7 +86,7 @@ class ImportItem(models.Model):
self.data['Title'],
self.data['Author']
)
search_result = books_manager.first_search_result(
search_result = connector_manager.first_search_result(
search_term, min_confidence=0.999
)
if search_result:

View file

@ -0,0 +1,33 @@
''' alert a user to activity '''
from django.db import models
from .base_model import BookWyrmModel
NotificationType = models.TextChoices(
'NotificationType',
'FAVORITE REPLY MENTION TAG FOLLOW FOLLOW_REQUEST BOOST IMPORT')
class Notification(BookWyrmModel):
''' you've been tagged, liked, followed, etc '''
user = models.ForeignKey('User', on_delete=models.PROTECT)
related_book = models.ForeignKey(
'Edition', on_delete=models.PROTECT, null=True)
related_user = models.ForeignKey(
'User',
on_delete=models.PROTECT, null=True, related_name='related_user')
related_status = models.ForeignKey(
'Status', on_delete=models.PROTECT, null=True)
related_import = models.ForeignKey(
'ImportJob', on_delete=models.PROTECT, null=True)
read = models.BooleanField(default=False)
notification_type = models.CharField(
max_length=255, choices=NotificationType.choices)
class Meta:
''' checks the notification type is in the enum of valid types '''
constraints = [
models.CheckConstraint(
check=models.Q(notification_type__in=NotificationType.values),
name="notification_type_valid",
)
]
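A standalone sketch of the enum the check constraint validates against; nothing here touches the database:

from django.db import models

NotificationType = models.TextChoices(
    'NotificationType',
    'FAVORITE REPLY MENTION TAG FOLLOW FOLLOW_REQUEST BOOST IMPORT')

# .values is the list the notification_type_valid constraint checks against
print(NotificationType.values)   # ['FAVORITE', 'REPLY', 'MENTION', 'TAG', ...]
# .choices feeds the CharField's choices argument
print(NotificationType.choices)  # [('FAVORITE', 'Favorite'), ('REPLY', 'Reply'), ...]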

View file

@ -0,0 +1,26 @@
''' progress in a book '''
from django.db import models
from django.utils import timezone
from .base_model import BookWyrmModel
class ReadThrough(BookWyrmModel):
''' Store progress through a book in the database. '''
user = models.ForeignKey('User', on_delete=models.PROTECT)
book = models.ForeignKey('Edition', on_delete=models.PROTECT)
pages_read = models.IntegerField(
null=True,
blank=True)
start_date = models.DateTimeField(
blank=True,
null=True)
finish_date = models.DateTimeField(
blank=True,
null=True)
def save(self, *args, **kwargs):
''' update user active time '''
self.user.last_active_date = timezone.now()
self.user.save()
super().save(*args, **kwargs)

View file

@ -222,26 +222,6 @@ class Review(Status):
pure_type = 'Article'
class Favorite(ActivitypubMixin, BookWyrmModel):
''' fav'ing a post '''
user = fields.ForeignKey(
'User', on_delete=models.PROTECT, activitypub_field='actor')
status = fields.ForeignKey(
'Status', on_delete=models.PROTECT, activitypub_field='object')
activity_serializer = activitypub.Like
def save(self, *args, **kwargs):
''' update user active time '''
self.user.last_active_date = timezone.now()
self.user.save()
super().save(*args, **kwargs)
class Meta:
''' can't fav things twice '''
unique_together = ('user', 'status')
class Boost(Status):
''' boost'ing a post '''
boosted_status = fields.ForeignKey(
@ -268,54 +248,3 @@ class Boost(Status):
# This constraint can't work as it would cross tables.
# class Meta:
# unique_together = ('user', 'boosted_status')
class ReadThrough(BookWyrmModel):
''' Store progress through a book in the database. '''
user = models.ForeignKey('User', on_delete=models.PROTECT)
book = models.ForeignKey('Edition', on_delete=models.PROTECT)
pages_read = models.IntegerField(
null=True,
blank=True)
start_date = models.DateTimeField(
blank=True,
null=True)
finish_date = models.DateTimeField(
blank=True,
null=True)
def save(self, *args, **kwargs):
''' update user active time '''
self.user.last_active_date = timezone.now()
self.user.save()
super().save(*args, **kwargs)
NotificationType = models.TextChoices(
'NotificationType',
'FAVORITE REPLY MENTION TAG FOLLOW FOLLOW_REQUEST BOOST IMPORT')
class Notification(BookWyrmModel):
''' you've been tagged, liked, followed, etc '''
user = models.ForeignKey('User', on_delete=models.PROTECT)
related_book = models.ForeignKey(
'Edition', on_delete=models.PROTECT, null=True)
related_user = models.ForeignKey(
'User',
on_delete=models.PROTECT, null=True, related_name='related_user')
related_status = models.ForeignKey(
'Status', on_delete=models.PROTECT, null=True)
related_import = models.ForeignKey(
'ImportJob', on_delete=models.PROTECT, null=True)
read = models.BooleanField(default=False)
notification_type = models.CharField(
max_length=255, choices=NotificationType.choices)
class Meta:
''' checks the notification type is in the enum of valid types '''
constraints = [
models.CheckConstraint(
check=models.Q(notification_type__in=NotificationType.values),
name="notification_type_valid",
)
]

View file

@ -166,22 +166,23 @@ def handle_imported_book(user, item, include_reviews, privacy):
if not item.book:
return
if item.shelf:
existing_shelf = models.ShelfBook.objects.filter(
book=item.book, added_by=user).exists()
# shelve the book if it hasn't been shelved already
if item.shelf and not existing_shelf:
desired_shelf = models.Shelf.objects.get(
identifier=item.shelf,
user=user
)
# shelve the book if it hasn't been shelved already
shelf_book, created = models.ShelfBook.objects.get_or_create(
shelf_book = models.ShelfBook.objects.create(
book=item.book, shelf=desired_shelf, added_by=user)
if created:
broadcast(user, shelf_book.to_add_activity(user), privacy=privacy)
broadcast(user, shelf_book.to_add_activity(user), privacy=privacy)
# only add new read-throughs if the item isn't already shelved
for read in item.reads:
read.book = item.book
read.user = user
read.save()
for read in item.reads:
read.book = item.book
read.user = user
read.save()
if include_reviews and (item.rating or item.review):
review_title = 'Review of {!r} on Goodreads'.format(
@ -220,8 +221,65 @@ def handle_status(user, form):
status.save()
# inspect the text for user tags
matches = []
for match in re.finditer(regex.strict_username, status.content):
content = status.content
for (mention_text, mention_user) in find_mentions(content):
# add them to status mentions fk
status.mention_users.add(mention_user)
# turn the mention into a link
content = re.sub(
r'%s([^@]|$)' % mention_text,
r'<a href="%s">%s</a>\g<1>' % \
(mention_user.remote_id, mention_text),
content)
# add reply parent to mentions and notify
if status.reply_parent:
status.mention_users.add(status.reply_parent.user)
for mention_user in status.reply_parent.mention_users.all():
status.mention_users.add(mention_user)
if status.reply_parent.user.local:
create_notification(
status.reply_parent.user,
'REPLY',
related_user=user,
related_status=status
)
# deduplicate mentions
status.mention_users.set(set(status.mention_users.all()))
# create mention notifications
for mention_user in status.mention_users.all():
if status.reply_parent and mention_user == status.reply_parent.user:
continue
if mention_user.local:
create_notification(
mention_user,
'MENTION',
related_user=user,
related_status=status
)
# don't apply formatting to generated notes
if not isinstance(status, models.GeneratedNote):
status.content = to_markdown(content)
# do apply formatting to quotes
if hasattr(status, 'quote'):
status.quote = to_markdown(status.quote)
status.save()
broadcast(user, status.to_create_activity(user), software='bookwyrm')
# re-format the activity for non-bookwyrm servers
remote_activity = status.to_create_activity(user, pure=True)
broadcast(user, remote_activity, software='other')
def find_mentions(content):
''' detect @mentions in raw status content '''
for match in re.finditer(regex.strict_username, content):
username = match.group().strip().split('@')[1:]
if len(username) == 1:
# this looks like a local user (@user), fill in the domain
@ -232,44 +290,7 @@ def handle_status(user, form):
if not mention_user:
# we can ignore users we don't know about
continue
matches.append((match.group(), mention_user.remote_id))
# add them to status mentions fk
status.mention_users.add(mention_user)
# create notification if the mentioned user is local
if mention_user.local:
create_notification(
mention_user,
'MENTION',
related_user=user,
related_status=status
)
# add mentions
content = status.content
for (username, url) in matches:
content = re.sub(
r'%s([^@])' % username,
r'<a href="%s">%s</a>\g<1>' % (url, username),
content)
if not isinstance(status, models.GeneratedNote):
status.content = to_markdown(content)
if hasattr(status, 'quote'):
status.quote = to_markdown(status.quote)
status.save()
# notify reply parent or tagged users
if status.reply_parent and status.reply_parent.user.local:
create_notification(
status.reply_parent.user,
'REPLY',
related_user=user,
related_status=status
)
broadcast(user, status.to_create_activity(user), software='bookwyrm')
# re-format the activity for non-bookwyrm servers
remote_activity = status.to_create_activity(user, pure=True)
broadcast(user, remote_activity, software='other')
yield (match.group(), mention_user)
def to_markdown(content):
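A standalone sketch of the link substitution used above; the handle and remote id are made up, and the ([^@]|$) group is what keeps the bare @rat from also matching inside @rat@example.com:

import re

mention_text = '@rat'
remote_id = 'https://example.com/user/rat'  # placeholder remote id
content = 'hi @rat, have you read this? cc @rat@example.com'

# link only the bare local mention; the full handle gets its own substitution pass
content = re.sub(
    r'%s([^@]|$)' % mention_text,
    r'<a href="%s">%s</a>\g<1>' % (remote_id, mention_text),
    content)
print(content)
# hi <a href="https://example.com/user/rat">@rat</a>, have you read this? cc @rat@example.com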

View file

@ -21,8 +21,6 @@
</div>
<button class="button is-primary" type="submit">Import</button>
</form>
<p>
Imports are limited in size, and only the first {{ limit }} items will be imported.
</div>
<div class="content block">

View file

@ -10,7 +10,7 @@
{% include 'snippets/content_warning_field.html' with parent_status=status %}
<label for="id_content_{{ status.id }}-{{ uuid }}" class="is-sr-only">Reply</label>
<div class="field">
<textarea class="textarea" name="content" placeholder="Leave a comment..." id="id_content_{{ status.id }}-{{ uuid }}" required="true">{{ status|mentions:request.user }}</textarea>
<textarea class="textarea" name="content" placeholder="Leave a comment..." id="id_content_{{ status.id }}-{{ uuid }}" required="true"></textarea>
</div>
</div>
<div class="column is-narrow">

View file

@ -1,4 +1,5 @@
''' testing book data connectors '''
from unittest.mock import patch
from django.test import TestCase
import responses
@ -104,8 +105,10 @@ class AbstractConnector(TestCase):
'https://example.com/book/abcd',
json=self.edition_data
)
result = self.connector.get_or_create_book(
'https://example.com/book/abcd')
with patch(
'bookwyrm.connectors.abstract_connector.load_more_data.delay'):
result = self.connector.get_or_create_book(
'https://example.com/book/abcd')
self.assertEqual(result, self.book)
self.assertEqual(models.Edition.objects.count(), 1)
self.assertEqual(models.Edition.objects.count(), 1)

View file

@ -1,12 +1,18 @@
''' interface between the app and various connectors '''
from django.test import TestCase
from bookwyrm import books_manager, models
from bookwyrm.connectors.bookwyrm_connector import Connector as BookWyrmConnector
from bookwyrm.connectors.self_connector import Connector as SelfConnector
from bookwyrm import models
from bookwyrm.connectors import connector_manager
from bookwyrm.connectors.bookwyrm_connector \
import Connector as BookWyrmConnector
from bookwyrm.connectors.self_connector \
import Connector as SelfConnector
class Book(TestCase):
class ConnectorManager(TestCase):
''' interface between the app and various connectors '''
def setUp(self):
''' we'll need some books and a connector info entry '''
self.work = models.Work.objects.create(
title='Example Work'
)
@ -28,53 +34,50 @@ class Book(TestCase):
covers_url='http://test.com/',
)
def test_get_edition(self):
edition = books_manager.get_edition(self.edition.id)
self.assertEqual(edition, self.edition)
def test_get_edition_work(self):
edition = books_manager.get_edition(self.work.id)
self.assertEqual(edition, self.edition)
def test_get_or_create_connector(self):
''' loads a connector if the data source is known or creates one '''
remote_id = 'https://example.com/object/1'
connector = books_manager.get_or_create_connector(remote_id)
connector = connector_manager.get_or_create_connector(remote_id)
self.assertIsInstance(connector, BookWyrmConnector)
self.assertEqual(connector.identifier, 'example.com')
self.assertEqual(connector.base_url, 'https://example.com')
same_connector = books_manager.get_or_create_connector(remote_id)
same_connector = connector_manager.get_or_create_connector(remote_id)
self.assertEqual(connector.identifier, same_connector.identifier)
def test_get_connectors(self):
''' load all connectors '''
remote_id = 'https://example.com/object/1'
books_manager.get_or_create_connector(remote_id)
connectors = list(books_manager.get_connectors())
connector_manager.get_or_create_connector(remote_id)
connectors = list(connector_manager.get_connectors())
self.assertEqual(len(connectors), 2)
self.assertIsInstance(connectors[0], SelfConnector)
self.assertIsInstance(connectors[1], BookWyrmConnector)
def test_search(self):
results = books_manager.search('Example')
''' search all connectors '''
results = connector_manager.search('Example')
self.assertEqual(len(results), 1)
self.assertIsInstance(results[0]['connector'], SelfConnector)
self.assertEqual(len(results[0]['results']), 1)
self.assertEqual(results[0]['results'][0].title, 'Example Edition')
def test_local_search(self):
results = books_manager.local_search('Example')
''' search only the local database '''
results = connector_manager.local_search('Example')
self.assertEqual(len(results), 1)
self.assertEqual(results[0].title, 'Example Edition')
def test_first_search_result(self):
result = books_manager.first_search_result('Example')
''' only get one search result '''
result = connector_manager.first_search_result('Example')
self.assertEqual(result.title, 'Example Edition')
no_result = books_manager.first_search_result('dkjfhg')
no_result = connector_manager.first_search_result('dkjfhg')
self.assertIsNone(no_result)
def test_load_connector(self):
connector = books_manager.load_connector(self.connector)
''' load a connector object from the database entry '''
connector = connector_manager.load_connector(self.connector)
self.assertIsInstance(connector, SelfConnector)
self.assertEqual(connector.identifier, 'test_connector')

View file

@ -12,7 +12,7 @@ from bookwyrm.connectors.openlibrary import get_languages, get_description
from bookwyrm.connectors.openlibrary import pick_default_edition, \
get_openlibrary_key
from bookwyrm.connectors.abstract_connector import SearchResult
from bookwyrm.connectors.abstract_connector import ConnectorException
from bookwyrm.connectors.connector_manager import ConnectorException
class Openlibrary(TestCase):

View file

@ -9,7 +9,9 @@ from bookwyrm.settings import DOMAIN
class SelfConnector(TestCase):
''' just uses local data '''
def setUp(self):
''' creating the connector '''
models.Connector.objects.create(
identifier=DOMAIN,
name='Local',
@ -22,58 +24,85 @@ class SelfConnector(TestCase):
priority=1,
)
self.connector = Connector(DOMAIN)
self.work = models.Work.objects.create(
title='Example Work',
)
author = models.Author.objects.create(name='Anonymous')
self.edition = models.Edition.objects.create(
title='Edition of Example Work',
published_date=datetime.datetime(1980, 5, 10, tzinfo=timezone.utc),
parent_work=self.work,
)
self.edition.authors.add(author)
models.Edition.objects.create(
title='Another Edition',
parent_work=self.work,
series='Anonymous'
)
models.Edition.objects.create(
title='More Editions',
subtitle='The Anonymous Edition',
parent_work=self.work,
)
edition = models.Edition.objects.create(
title='An Edition',
parent_work=self.work
)
edition.authors.add(models.Author.objects.create(name='Fish'))
def test_format_search_result(self):
''' create a SearchResult '''
author = models.Author.objects.create(name='Anonymous')
edition = models.Edition.objects.create(
title='Edition of Example Work',
published_date=datetime.datetime(1980, 5, 10, tzinfo=timezone.utc),
)
edition.authors.add(author)
result = self.connector.search('Edition of Example')[0]
self.assertEqual(result.title, 'Edition of Example Work')
self.assertEqual(result.key, self.edition.remote_id)
self.assertEqual(result.key, edition.remote_id)
self.assertEqual(result.author, 'Anonymous')
self.assertEqual(result.year, 1980)
self.assertEqual(result.connector, self.connector)
def test_search_rank(self):
''' prioritize certain results '''
author = models.Author.objects.create(name='Anonymous')
edition = models.Edition.objects.create(
title='Edition of Example Work',
published_date=datetime.datetime(1980, 5, 10, tzinfo=timezone.utc),
parent_work=models.Work.objects.create(title='')
)
# author text is rank C
edition.authors.add(author)
# series is rank D
models.Edition.objects.create(
title='Another Edition',
series='Anonymous',
parent_work=models.Work.objects.create(title='')
)
# subtitle is rank B
models.Edition.objects.create(
title='More Editions',
subtitle='The Anonymous Edition',
parent_work=models.Work.objects.create(title='')
)
# title is rank A
models.Edition.objects.create(title='Anonymous')
# doesn't rank in this search
edition = models.Edition.objects.create(
title='An Edition',
parent_work=models.Work.objects.create(title='')
)
results = self.connector.search('Anonymous')
self.assertEqual(len(results), 2)
self.assertEqual(results[0].title, 'More Editions')
self.assertEqual(results[1].title, 'Edition of Example Work')
self.assertEqual(len(results), 3)
self.assertEqual(results[0].title, 'Anonymous')
self.assertEqual(results[1].title, 'More Editions')
self.assertEqual(results[2].title, 'Edition of Example Work')
def test_search_default_filter(self):
def test_search_multiple_editions(self):
''' it should get rid of duplicate editions for the same work '''
self.work.default_edition = self.edition
self.work.save()
work = models.Work.objects.create(title='Work Title')
edition_1 = models.Edition.objects.create(
title='Edition 1 Title', parent_work=work)
edition_2 = models.Edition.objects.create(
title='Edition 2 Title', parent_work=work)
edition_3 = models.Edition.objects.create(
title='Fish', parent_work=work)
work.default_edition = edition_2
work.save()
results = self.connector.search('Anonymous')
# pick the best edition
results = self.connector.search('Edition 1 Title')
self.assertEqual(len(results), 1)
self.assertEqual(results[0].title, 'Edition of Example Work')
self.assertEqual(results[0].key, edition_1.remote_id)
# pick the default edition when no match is best
results = self.connector.search('Edition Title')
self.assertEqual(len(results), 1)
self.assertEqual(results[0].key, edition_2.remote_id)
# only matches one edition, so no deduplication takes place
results = self.connector.search('Fish')
self.assertEqual(len(results), 1)
self.assertEqual(results[0].title, 'An Edition')
self.assertEqual(results[0].key, edition_3.remote_id)

View file

@ -0,0 +1,4 @@
Book Id,Title,Author,Author l-f,Additional Authors,ISBN,ISBN13,My Rating,Average Rating,Publisher,Binding,Number of Pages,Year Published,Original Publication Year,Date Read,Date Added,Bookshelves,Bookshelves with positions,Exclusive Shelf,My Review,Spoiler,Private Notes,Read Count,Recommended For,Recommended By,Owned Copies,Original Purchase Date,Original Purchase Location,Condition,Condition Description,BCID
42036538,Gideon the Ninth (The Locked Tomb #1),Tamsyn Muir,"Muir, Tamsyn",,"=""1250313198""","=""9781250313195""",0,4.20,Tor,Hardcover,448,2019,2019,2020/10/25,2020/10/21,,,read,,,,1,,,0,,,,,
52691223,Subcutanean,Aaron A. Reed,"Reed, Aaron A.",,"=""""","=""""",0,4.45,,Paperback,232,2020,,2020/03/06,2020/03/05,,,read,,,,1,,,0,,,,,
28694510,Patisserie at Home,Mélanie Dupuis,"Dupuis, Mélanie",Anne Cazor,"=""0062445316""","=""9780062445315""",2,4.60,Harper Design,Hardcover,288,2016,,,2019/07/08,,,read,"mixed feelings",,,2,,,0,,,,,

View file

@ -8,7 +8,8 @@ from django.utils import timezone
from django.test import TestCase
import responses
from bookwyrm import books_manager, models
from bookwyrm import models
from bookwyrm.connectors import connector_manager
from bookwyrm.connectors.abstract_connector import SearchResult
@ -134,7 +135,7 @@ class ImportJob(TestCase):
search_url='https://openlibrary.org/search?q=',
priority=3,
)
connector = books_manager.load_connector(connector_info)
connector = connector_manager.load_connector(connector_info)
result = SearchResult(
title='Test Result',
key='https://openlibrary.org/works/OL1234W',
@ -163,8 +164,12 @@ class ImportJob(TestCase):
json={'name': 'test author'},
status=200)
with patch('bookwyrm.books_manager.first_search_result') as search:
search.return_value = result
book = self.item_1.get_book_from_isbn()
with patch(
'bookwyrm.connectors.abstract_connector.load_more_data.delay'):
with patch(
'bookwyrm.connectors.connector_manager.first_search_result'
) as search:
search.return_value = result
book = self.item_1.get_book_from_isbn()
self.assertEqual(book.title, 'Sabriel')

View file

@ -0,0 +1,104 @@
''' testing import '''
from collections import namedtuple
import pathlib
from unittest.mock import patch
from django.test import TestCase
import responses
from bookwyrm import goodreads_import, models
from bookwyrm.settings import DOMAIN
class GoodreadsImport(TestCase):
''' importing from goodreads csv '''
def setUp(self):
''' use a test csv '''
datafile = pathlib.Path(__file__).parent.joinpath(
'data/goodreads.csv')
self.csv = open(datafile, 'r')
self.user = models.User.objects.create_user(
'mouse', 'mouse@mouse.mouse', 'password', local=True)
models.Connector.objects.create(
identifier=DOMAIN,
name='Local',
local=True,
connector_file='self_connector',
base_url='https://%s' % DOMAIN,
books_url='https://%s/book' % DOMAIN,
covers_url='https://%s/images/covers' % DOMAIN,
search_url='https://%s/search?q=' % DOMAIN,
priority=1,
)
def test_create_job(self):
''' creates the import job entry and checks csv '''
import_job = goodreads_import.create_job(
self.user, self.csv, False, 'public')
self.assertEqual(import_job.user, self.user)
self.assertEqual(import_job.include_reviews, False)
self.assertEqual(import_job.privacy, 'public')
import_items = models.ImportItem.objects.filter(job=import_job).all()
self.assertEqual(len(import_items), 3)
self.assertEqual(import_items[0].index, 0)
self.assertEqual(import_items[0].data['Book Id'], '42036538')
self.assertEqual(import_items[1].index, 1)
self.assertEqual(import_items[1].data['Book Id'], '52691223')
self.assertEqual(import_items[2].index, 2)
self.assertEqual(import_items[2].data['Book Id'], '28694510')
def test_create_retry_job(self):
''' trying again with items that didn't import '''
import_job = goodreads_import.create_job(
self.user, self.csv, False, 'unlisted')
import_items = models.ImportItem.objects.filter(
job=import_job
).all()[:2]
retry = goodreads_import.create_retry_job(
self.user, import_job, import_items)
self.assertNotEqual(import_job, retry)
self.assertEqual(retry.user, self.user)
self.assertEqual(retry.include_reviews, False)
self.assertEqual(retry.privacy, 'unlisted')
retry_items = models.ImportItem.objects.filter(job=retry).all()
self.assertEqual(len(retry_items), 2)
self.assertEqual(retry_items[0].index, 0)
self.assertEqual(retry_items[0].data['Book Id'], '42036538')
self.assertEqual(retry_items[1].index, 1)
self.assertEqual(retry_items[1].data['Book Id'], '52691223')
def test_start_import(self):
''' begin loading books '''
import_job = goodreads_import.create_job(
self.user, self.csv, False, 'unlisted')
MockTask = namedtuple('Task', ('id'))
mock_task = MockTask(7)
with patch('bookwyrm.goodreads_import.import_data.delay') as start:
start.return_value = mock_task
goodreads_import.start_import(import_job)
import_job.refresh_from_db()
self.assertEqual(import_job.task_id, '7')
@responses.activate
def test_import_data(self):
''' resolve entry '''
import_job = goodreads_import.create_job(
self.user, self.csv, False, 'unlisted')
book = models.Edition.objects.create(title='Test Book')
with patch(
'bookwyrm.models.import_job.ImportItem.get_book_from_isbn'
) as resolve:
resolve.return_value = book
with patch('bookwyrm.outgoing.handle_imported_book'):
goodreads_import.import_data(import_job.id)
import_item = models.ImportItem.objects.get(job=import_job, index=0)
self.assertEqual(import_item.book.id, book.id)

View file

@ -1,4 +1,5 @@
''' sending out activities '''
import csv
import json
import pathlib
from unittest.mock import patch
@ -8,10 +9,11 @@ from django.test import TestCase
from django.test.client import RequestFactory
import responses
from bookwyrm import models, outgoing
from bookwyrm import forms, models, outgoing
from bookwyrm.settings import DOMAIN
# pylint: disable=too-many-public-methods
class Outgoing(TestCase):
''' sends out activities '''
def setUp(self):
@ -255,3 +257,190 @@ class Outgoing(TestCase):
with patch('bookwyrm.broadcast.broadcast_task.delay'):
outgoing.handle_unshelve(self.local_user, self.book, self.shelf)
self.assertEqual(self.shelf.books.count(), 0)
def test_handle_imported_book(self):
''' goodreads import added a book, this adds related connections '''
shelf = self.local_user.shelf_set.filter(identifier='read').first()
self.assertIsNone(shelf.books.first())
import_job = models.ImportJob.objects.create(user=self.local_user)
datafile = pathlib.Path(__file__).parent.joinpath('data/goodreads.csv')
csv_file = open(datafile, 'r')
for index, entry in enumerate(list(csv.DictReader(csv_file))):
import_item = models.ImportItem.objects.create(
job_id=import_job.id, index=index, data=entry, book=self.book)
break
with patch('bookwyrm.broadcast.broadcast_task.delay'):
outgoing.handle_imported_book(
self.local_user, import_item, False, 'public')
shelf.refresh_from_db()
self.assertEqual(shelf.books.first(), self.book)
readthrough = models.ReadThrough.objects.get(user=self.local_user)
self.assertEqual(readthrough.book, self.book)
# I can't remember how to create dates and I don't want to look it up.
self.assertEqual(readthrough.start_date.year, 2020)
self.assertEqual(readthrough.start_date.month, 10)
self.assertEqual(readthrough.start_date.day, 21)
self.assertEqual(readthrough.finish_date.year, 2020)
self.assertEqual(readthrough.finish_date.month, 10)
self.assertEqual(readthrough.finish_date.day, 25)
def test_handle_imported_book_already_shelved(self):
''' goodreads import added a book, this adds related connections '''
shelf = self.local_user.shelf_set.filter(identifier='to-read').first()
models.ShelfBook.objects.create(
shelf=shelf, added_by=self.local_user, book=self.book)
import_job = models.ImportJob.objects.create(user=self.local_user)
datafile = pathlib.Path(__file__).parent.joinpath('data/goodreads.csv')
csv_file = open(datafile, 'r')
for index, entry in enumerate(list(csv.DictReader(csv_file))):
import_item = models.ImportItem.objects.create(
job_id=import_job.id, index=index, data=entry, book=self.book)
break
with patch('bookwyrm.broadcast.broadcast_task.delay'):
outgoing.handle_imported_book(
self.local_user, import_item, False, 'public')
shelf.refresh_from_db()
self.assertEqual(shelf.books.first(), self.book)
self.assertIsNone(
self.local_user.shelf_set.get(identifier='read').books.first())
readthrough = models.ReadThrough.objects.get(user=self.local_user)
self.assertEqual(readthrough.book, self.book)
self.assertEqual(readthrough.start_date.year, 2020)
self.assertEqual(readthrough.start_date.month, 10)
self.assertEqual(readthrough.start_date.day, 21)
self.assertEqual(readthrough.finish_date.year, 2020)
self.assertEqual(readthrough.finish_date.month, 10)
self.assertEqual(readthrough.finish_date.day, 25)
def test_handle_imported_book_review(self):
''' goodreads review import '''
import_job = models.ImportJob.objects.create(user=self.local_user)
datafile = pathlib.Path(__file__).parent.joinpath('data/goodreads.csv')
csv_file = open(datafile, 'r')
entry = list(csv.DictReader(csv_file))[2]
import_item = models.ImportItem.objects.create(
job_id=import_job.id, index=0, data=entry, book=self.book)
with patch('bookwyrm.broadcast.broadcast_task.delay'):
outgoing.handle_imported_book(
self.local_user, import_item, True, 'unlisted')
review = models.Review.objects.get(book=self.book, user=self.local_user)
self.assertEqual(review.content, 'mixed feelings')
self.assertEqual(review.rating, 2)
self.assertEqual(review.published_date.year, 2019)
self.assertEqual(review.published_date.month, 7)
self.assertEqual(review.published_date.day, 8)
self.assertEqual(review.privacy, 'unlisted')
def test_handle_imported_book_reviews_disabled(self):
''' goodreads review import '''
import_job = models.ImportJob.objects.create(user=self.local_user)
datafile = pathlib.Path(__file__).parent.joinpath('data/goodreads.csv')
csv_file = open(datafile, 'r')
entry = list(csv.DictReader(csv_file))[2]
import_item = models.ImportItem.objects.create(
job_id=import_job.id, index=0, data=entry, book=self.book)
with patch('bookwyrm.broadcast.broadcast_task.delay'):
outgoing.handle_imported_book(
self.local_user, import_item, False, 'unlisted')
self.assertFalse(models.Review.objects.filter(
book=self.book, user=self.local_user
).exists())
def test_handle_status(self):
''' create a status '''
form = forms.CommentForm({
'content': 'hi',
'user': self.local_user.id,
'book': self.book.id,
'privacy': 'public',
})
with patch('bookwyrm.broadcast.broadcast_task.delay'):
outgoing.handle_status(self.local_user, form)
status = models.Comment.objects.get()
self.assertEqual(status.content, '<p>hi</p>')
self.assertEqual(status.user, self.local_user)
self.assertEqual(status.book, self.book)
def test_handle_status_reply(self):
''' create a status in reply to an existing status '''
user = models.User.objects.create_user(
'rat', 'rat@rat.com', 'password', local=True)
parent = models.Status.objects.create(
content='parent status', user=self.local_user)
form = forms.ReplyForm({
'content': 'hi',
'user': user.id,
'reply_parent': parent.id,
'privacy': 'public',
})
with patch('bookwyrm.broadcast.broadcast_task.delay'):
outgoing.handle_status(user, form)
status = models.Status.objects.get(user=user)
self.assertEqual(status.content, '<p>hi</p>')
self.assertEqual(status.user, user)
self.assertEqual(
models.Notification.objects.get().user, self.local_user)
def test_handle_status_mentions(self):
''' @mention a user in a post '''
user = models.User.objects.create_user(
'rat', 'rat@rat.com', 'password', local=True)
form = forms.CommentForm({
'content': 'hi @rat',
'user': self.local_user.id,
'book': self.book.id,
'privacy': 'public',
})
with patch('bookwyrm.broadcast.broadcast_task.delay'):
outgoing.handle_status(self.local_user, form)
status = models.Status.objects.get()
self.assertEqual(
status.content,
'<p>hi <a href="%s">@rat</a></p>' % user.remote_id)
self.assertEqual(list(status.mention_users.all()), [user])
self.assertEqual(models.Notification.objects.get().user, user)
def test_handle_status_reply_with_mentions(self):
''' reply to a post with an @mention'ed user '''
user = models.User.objects.create_user(
'rat', 'rat@rat.com', 'password', local=True)
form = forms.CommentForm({
'content': 'hi @rat@example.com',
'user': self.local_user.id,
'book': self.book.id,
'privacy': 'public',
})
with patch('bookwyrm.broadcast.broadcast_task.delay'):
outgoing.handle_status(self.local_user, form)
status = models.Status.objects.get()
form = forms.ReplyForm({
'content': 'right',
'user': user,
'privacy': 'public',
'reply_parent': status.id
})
with patch('bookwyrm.broadcast.broadcast_task.delay'):
outgoing.handle_status(user, form)
reply = models.Status.replies(status).first()
self.assertEqual(reply.content, '<p>right</p>')
self.assertEqual(reply.user, user)
self.assertTrue(self.remote_user in reply.mention_users.all())
self.assertTrue(self.local_user in reply.mention_users.all())

View file

@ -10,7 +10,7 @@ from django.test.client import RequestFactory
from bookwyrm import models, views
from bookwyrm.connectors import abstract_connector
from bookwyrm.settings import DOMAIN
from bookwyrm.settings import DOMAIN, USER_AGENT
# pylint: disable=too-many-public-methods
@ -39,6 +39,14 @@ class Views(TestCase):
)
def test_get_edition(self):
''' given an edition or a work, returns an edition '''
self.assertEqual(
views.get_edition(self.book.id), self.book)
self.assertEqual(
views.get_edition(self.work.id), self.book)
def test_get_user_from_username(self):
''' works for either localname or username '''
self.assertEqual(
@ -193,7 +201,8 @@ class Views(TestCase):
request = self.factory.get('', {'q': 'Test Book'})
with patch('bookwyrm.views.is_api_request') as is_api:
is_api.return_value = False
with patch('bookwyrm.books_manager.search') as manager:
with patch(
'bookwyrm.connectors.connector_manager.search') as manager:
manager.return_value = [search_result]
response = views.search(request)
self.assertIsInstance(response, TemplateResponse)
@ -530,3 +539,16 @@ class Views(TestCase):
request, self.local_user.username, shelf.identifier)
self.assertIsInstance(result, JsonResponse)
self.assertEqual(result.status_code, 200)
def test_is_bookwyrm_request(self):
''' tests the function that checks if a request came from a bookwyrm instance '''
request = self.factory.get('', {'q': 'Test Book'})
self.assertFalse(views.is_bookworm_request(request))
request = self.factory.get('', {'q': 'Test Book'},
HTTP_USER_AGENT="http.rb/4.4.1 (Mastodon/3.3.0; +https://mastodon.social/)")
self.assertFalse(views.is_bookworm_request(request))
request = self.factory.get('', {'q': 'Test Book'}, HTTP_USER_AGENT=USER_AGENT)
self.assertTrue(views.is_bookworm_request(request))

View file

@ -6,3 +6,5 @@ strict_localname = r'@[a-zA-Z_\-\.0-9]+'
username = r'%s(@%s)?' % (localname, domain)
strict_username = r'%s(@%s)?' % (strict_localname, domain)
full_username = r'%s@%s' % (localname, domain)
# should match (BookWyrm/1.0.0; or (BookWyrm/99.1.2;
bookwyrm_user_agent = r'\(BookWyrm/[0-9]+\.[0-9]+\.[0-9]+;'
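A standalone sketch of what the pattern matches; the first user agent string is illustrative, the second is the Mastodon string used in the view tests above:

import re

bookwyrm_user_agent = r'\(BookWyrm/[0-9]+\.[0-9]+\.[0-9]+;'

# another BookWyrm instance announces itself with a parenthesised version stamp
print(bool(re.search(bookwyrm_user_agent, 'BookWyrm (BookWyrm/0.0.1; +https://example.com/)')))  # True
# other fediverse software does not match
print(bool(re.search(bookwyrm_user_agent, 'http.rb/4.4.1 (Mastodon/3.3.0; +https://mastodon.social/)')))  # False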

View file

@ -17,11 +17,12 @@ from django.template.response import TemplateResponse
from django.utils import timezone
from django.views.decorators.http import require_GET, require_POST
from bookwyrm import books_manager, forms, models, outgoing, goodreads_import
from bookwyrm import forms, models, outgoing, goodreads_import
from bookwyrm.connectors import connector_manager
from bookwyrm.broadcast import broadcast
from bookwyrm.emailing import password_reset_email
from bookwyrm.settings import DOMAIN
from bookwyrm.views import get_user_from_username
from bookwyrm.views import get_user_from_username, get_edition
@require_POST
@ -210,10 +211,8 @@ def edit_profile(request):
def resolve_book(request):
''' figure out the local path to a book from a remote_id '''
remote_id = request.POST.get('remote_id')
connector = books_manager.get_or_create_connector(remote_id)
connector = connector_manager.get_or_create_connector(remote_id)
book = connector.get_or_create_book(remote_id)
if book.connector:
books_manager.load_more_data.delay(book.id)
return redirect('/book/%d' % book.id)
@ -371,7 +370,7 @@ def delete_shelf(request, shelf_id):
@require_POST
def shelve(request):
''' put a book on a user's shelf '''
book = books_manager.get_edition(request.POST['book'])
book = get_edition(request.POST['book'])
desired_shelf = models.Shelf.objects.filter(
identifier=request.POST['shelf'],
@ -417,7 +416,7 @@ def unshelve(request):
@require_POST
def start_reading(request, book_id):
''' begin reading a book '''
book = books_manager.get_edition(book_id)
book = get_edition(book_id)
shelf = models.Shelf.objects.filter(
identifier='reading',
user=request.user
@ -453,7 +452,7 @@ def start_reading(request, book_id):
@require_POST
def finish_reading(request, book_id):
''' a user completed a book, yay '''
book = books_manager.get_edition(book_id)
book = get_edition(book_id)
shelf = models.Shelf.objects.filter(
identifier='read',
user=request.user

View file

@ -13,14 +13,21 @@ from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_GET
from bookwyrm import outgoing
from bookwyrm import forms, models
from bookwyrm.activitypub import ActivitypubResponse
from bookwyrm import forms, models, books_manager
from bookwyrm import goodreads_import
from bookwyrm.connectors import connector_manager
from bookwyrm.settings import PAGE_LENGTH
from bookwyrm.tasks import app
from bookwyrm.utils import regex
def get_edition(book_id):
''' look up a book in the db and return an edition '''
book = models.Book.objects.select_subclasses().get(id=book_id)
if isinstance(book, models.Work):
book = book.get_default_edition()
return book
def get_user_from_username(username):
''' helper function to resolve a localname or a username to a user '''
# raises DoesNotExist if user is not found
@ -35,6 +42,13 @@ def is_api_request(request):
return 'json' in request.headers.get('Accept') or \
request.path[-5:] == '.json'
def is_bookworm_request(request):
''' check if the request is coming from another bookwyrm instance '''
user_agent = request.headers.get('User-Agent')
if user_agent is None or re.search(regex.bookwyrm_user_agent, user_agent) is None:
return False
return True
def server_error_page(request):
''' 500 errors '''
@ -211,7 +225,7 @@ def search(request):
if is_api_request(request):
# only return local book results via json so we don't cause a cascade
book_results = books_manager.local_search(query)
book_results = connector_manager.local_search(query)
return JsonResponse([r.json() for r in book_results], safe=False)
# use webfinger for mastodon style account@domain.com username
@ -225,7 +239,7 @@ def search(request):
similarity__gt=0.5,
).order_by('-similarity')[:10]
book_results = books_manager.search(query)
book_results = connector_manager.search(query)
data = {
'title': 'Search Results',
'book_results': book_results,
@ -244,7 +258,6 @@ def import_page(request):
'import_form': forms.ImportForm(),
'jobs': models.ImportJob.
objects.filter(user=request.user).order_by('-created_date'),
'limit': goodreads_import.MAX_ENTRIES,
})
@ -499,7 +512,7 @@ def status_page(request, username, status_id):
return HttpResponseNotFound()
if is_api_request(request):
return ActivitypubResponse(status.to_activity())
return ActivitypubResponse(status.to_activity(pure=not is_bookworm_request(request)))
data = {
'title': 'Status by %s' % user.username,
@ -645,7 +658,7 @@ def book_page(request, book_id):
@require_GET
def edit_book_page(request, book_id):
''' info about a book '''
book = books_manager.get_edition(book_id)
book = get_edition(book_id)
if not book.description:
book.description = book.parent_work.description
data = {

View file

@ -20,8 +20,9 @@ app.config_from_object('django.conf:settings', namespace='CELERY')
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
app.autodiscover_tasks(['bookwyrm'], related_name='activitypub.base_activity')
app.autodiscover_tasks(['bookwyrm'], related_name='books_manager')
app.autodiscover_tasks(['bookwyrm'], related_name='broadcast')
app.autodiscover_tasks(
['bookwyrm'], related_name='connectors.abstract_connector')
app.autodiscover_tasks(['bookwyrm'], related_name='emailing')
app.autodiscover_tasks(['bookwyrm'], related_name='goodreads_import')
app.autodiscover_tasks(['bookwyrm'], related_name='incoming')