Merge remote-tracking branch 'origin/master' into search-reapply

ghowardsit
Eric Holscher 2019-01-22 13:59:47 -05:00
commit 46c4f2084d
349 changed files with 8154 additions and 8557 deletions


@ -1,11 +1,12 @@
language: python
python:
- 2.7
- 3.6
env:
- ES_VERSION=6.2.4 ES_DOWNLOAD_URL=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-${ES_VERSION}.tar.gz
matrix:
include:
- python: 3.6
env: TOXENV=py36 ES_VERSION=1.3.9 ES_DOWNLOAD_URL=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch-${ES_VERSION}.tar.gz
- python: 3.6
env: TOXENV=docs
- python: 3.6
@ -45,6 +46,6 @@ notifications:
branches:
only:
- master
- master
- rel # Community release branch
- relcorp # Corporate release branch

common

@ -1 +1 @@
Subproject commit 46aad68c905ff843559b39cb52b5d54e586115c4
Subproject commit 2c42860327916ec66f3aed7cf3d7bab809438ab4


@ -6,7 +6,6 @@ import os
import sys
import sphinx_rtd_theme
from recommonmark.parser import CommonMarkParser
sys.path.insert(0, os.path.abspath('..'))
sys.path.append(os.path.dirname(__file__))
@ -29,13 +28,11 @@ extensions = [
'doc_extensions',
'sphinx_tabs.tabs',
'sphinx-prompt',
'recommonmark',
]
templates_path = ['_templates']
source_suffix = ['.rst', '.md']
source_parsers = {
'.md': CommonMarkParser,
}
master_doc = 'index'
project = u'Read the Docs'


@ -233,7 +233,7 @@ We deploy readthedocs.org from the `rel` branch in our GitHub repository. You ca
How can I avoid search results having a deprecated version of my docs?
---------------------------------------------------------------------
----------------------------------------------------------------------
If readers search something related to your docs in Google, it will probably return the most relevant version of your documentation.
It may happen that this version is already deprecated and you want to stop Google indexing it as a result,


@ -1,9 +1,10 @@
# -*- coding: utf-8 -*-
"""Read the Docs."""
import os.path
from future.moves.configparser import RawConfigParser
from configparser import RawConfigParser
def get_version(setupcfg_path):


@ -1,3 +1,5 @@
"""App init"""
# -*- coding: utf-8 -*-
default_app_config = 'readthedocs.analytics.apps.AnalyticsAppConfig' # noqa
"""App init."""
default_app_config = 'readthedocs.analytics.apps.AnalyticsAppConfig' # noqa


@ -1,12 +1,13 @@
# -*- coding: utf-8 -*-
"""Django app config for the analytics app."""
from __future__ import absolute_import
from django.apps import AppConfig
class AnalyticsAppConfig(AppConfig):
"""Analytics app init code"""
"""Analytics app init code."""
name = 'readthedocs.analytics'
verbose_name = 'Analytics'


@ -1,6 +1,6 @@
"""Tasks for Read the Docs' analytics"""
# -*- coding: utf-8 -*-
from __future__ import absolute_import
"""Tasks for Read the Docs' analytics."""
from django.conf import settings
@ -11,24 +11,24 @@ from .utils import send_to_analytics
DEFAULT_PARAMETERS = {
'v': '1', # analytics version (always 1)
'aip': '1', # anonymize IP
'v': '1', # analytics version (always 1)
'aip': '1', # anonymize IP
'tid': settings.GLOBAL_ANALYTICS_CODE,
# User data
'uip': None, # User IP address
'ua': None, # User agent
'uip': None, # User IP address
'ua': None, # User agent
# Application info
'an': 'Read the Docs',
'av': readthedocs.__version__, # App version
'av': readthedocs.__version__, # App version
}
@app.task(queue='web')
def analytics_pageview(url, title=None, **kwargs):
"""
Send a pageview to Google Analytics
Send a pageview to Google Analytics.
:see: https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters
:param url: the URL of the pageview
@ -37,8 +37,8 @@ def analytics_pageview(url, title=None, **kwargs):
"""
data = {
't': 'pageview',
'dl': url, # URL of the pageview (required)
'dt': title, # Title of the page
'dl': url, # URL of the pageview (required)
'dt': title, # Title of the page
}
data.update(DEFAULT_PARAMETERS)
data.update(kwargs)
@ -46,9 +46,12 @@ def analytics_pageview(url, title=None, **kwargs):
@app.task(queue='web')
def analytics_event(event_category, event_action, event_label=None, event_value=None, **kwargs):
def analytics_event(
event_category, event_action, event_label=None, event_value=None,
**kwargs
):
"""
Send an analytics event to Google Analytics
Send an analytics event to Google Analytics.
:see: https://developers.google.com/analytics/devguides/collection/protocol/v1/devguide#event
:param event_category: the category of the event
@ -58,11 +61,11 @@ def analytics_event(event_category, event_action, event_label=None, event_value=
:param kwargs: extra event parameters to send to GA
"""
data = {
't': 'event', # GA event - don't change
'ec': event_category, # Event category (required)
'ea': event_action, # Event action (required)
'el': event_label, # Event label
'ev': event_value, # Event value (numeric)
't': 'event', # GA event - don't change
'ec': event_category, # Event category (required)
'ea': event_action, # Event action (required)
'el': event_label, # Event label
'ev': event_value, # Event value (numeric)
}
data.update(DEFAULT_PARAMETERS)
data.update(kwargs)

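For reference, a minimal standalone sketch of the pageview hit these tasks assemble and send to the Measurement Protocol collect endpoint; the parameter names and URL come from the code above, while the tracking ID, client ID, page URL and title are placeholders:

import requests

data = {
    'v': '1',                      # protocol version (always 1)
    'tid': 'UA-000000-1',          # placeholder tracking ID
    'cid': 'example-client-id',    # placeholder client ID
    'aip': '1',                    # anonymize IP
    't': 'pageview',               # hit type
    'dl': 'https://docs.example.com/en/latest/',  # placeholder page URL
    'dt': 'Example page title',    # placeholder page title
}
requests.post(
    'https://www.google-analytics.com/collect',
    data=data,
    timeout=3,  # seconds
)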

@ -1,5 +1,4 @@
from __future__ import absolute_import, unicode_literals
# -*- coding: utf-8 -*-
from django.test import TestCase
from .utils import anonymize_ip_address, anonymize_user_agent
@ -29,4 +28,3 @@ class UtilsTests(TestCase):
anonymize_user_agent('Some rare user agent'),
'Rare user agent',
)


@ -1,26 +1,23 @@
"""Utilities related to analytics"""
# -*- coding: utf-8 -*-
"""Utilities related to analytics."""
from __future__ import absolute_import, unicode_literals
import hashlib
import ipaddress
import logging
from django.conf import settings
from django.utils.encoding import force_text, force_bytes
from django.utils.crypto import get_random_string
import requests
from django.conf import settings
from django.utils.crypto import get_random_string
from django.utils.encoding import force_bytes, force_text
from user_agents import parse
try:
# Python 3.3+ only
import ipaddress
except ImportError:
from .vendor import ipaddress
log = logging.getLogger(__name__) # noqa
log = logging.getLogger(__name__) # noqa
def get_client_ip(request):
"""Gets the real IP based on a request object"""
"""Gets the real IP based on a request object."""
ip_address = request.META.get('REMOTE_ADDR')
# Get the original IP address (eg. "X-Forwarded-For: client, proxy1, proxy2")
@ -32,7 +29,7 @@ def get_client_ip(request):
def anonymize_ip_address(ip_address):
"""Anonymizes an IP address by zeroing the last 2 bytes"""
"""Anonymizes an IP address by zeroing the last 2 bytes."""
# Used to anonymize an IP by zero-ing out the last 2 bytes
ip_mask = int('0xFFFFFFFFFFFFFFFFFFFFFFFFFFFF0000', 16)
@ -46,7 +43,7 @@ def anonymize_ip_address(ip_address):
def anonymize_user_agent(user_agent):
"""Anonymizes rare user agents"""
"""Anonymizes rare user agents."""
# If the browser family is not recognized, this is a rare user agent
parsed_ua = parse(user_agent)
if parsed_ua.browser.family == 'Other' or parsed_ua.os.family == 'Other':
@ -56,7 +53,7 @@ def anonymize_user_agent(user_agent):
def send_to_analytics(data):
"""Sends data to Google Analytics"""
"""Sends data to Google Analytics."""
if data.get('uip') and data.get('ua'):
data['cid'] = generate_client_id(data['uip'], data['ua'])
@ -74,7 +71,7 @@ def send_to_analytics(data):
resp = requests.post(
'https://www.google-analytics.com/collect',
data=data,
timeout=3, # seconds
timeout=3, # seconds
)
except requests.Timeout:
log.warning('Timeout sending to Google Analytics')
@ -85,10 +82,10 @@ def send_to_analytics(data):
def generate_client_id(ip_address, user_agent):
"""
Create an advertising ID
Create an advertising ID.
This simplifies things but essentially if a user has the same IP and same UA,
this will treat them as the same user for analytics purposes
This simplifies things but essentially if a user has the same IP and same
UA, this will treat them as the same user for analytics purposes
"""
salt = b'advertising-client-id'

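The "advertising ID" described in the docstring above reduces to a salted hash of the (anonymized) IP and user agent, so repeat visits from the same pair map to one identifier. A minimal sketch of that idea, not the exact helper:

import hashlib

def sketch_client_id(ip_address, user_agent, salt=b'advertising-client-id'):
    # Same IP + same user agent -> the same stable, non-reversible identifier.
    digest = hashlib.sha256(
        salt + ip_address.encode('utf-8') + user_agent.encode('utf-8'),
    )
    return digest.hexdigest()

print(sketch_client_id('203.0.113.0', 'Mozilla/5.0'))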
File diff suppressed because it is too large.


@ -1,10 +1,7 @@
# -*- coding: utf-8 -*-
"""API resources."""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
"""API resources."""
import logging
from builtins import object
import redis
from django.conf.urls import url
@ -25,6 +22,7 @@ from readthedocs.projects.models import ImportedFile, Project
from .utils import PostAuthentication
log = logging.getLogger(__name__)
@ -34,7 +32,7 @@ class ProjectResource(ModelResource):
users = fields.ToManyField('readthedocs.api.base.UserResource', 'users')
class Meta(object):
class Meta:
include_absolute_url = True
allowed_methods = ['get', 'post', 'put']
queryset = Project.objects.api()
@ -48,7 +46,7 @@ class ProjectResource(ModelResource):
def get_object_list(self, request):
self._meta.queryset = Project.objects.api(user=request.user)
return super(ProjectResource, self).get_object_list(request)
return super().get_object_list(request)
def dehydrate(self, bundle):
bundle.data['downloads'] = bundle.obj.get_downloads()
@ -72,7 +70,9 @@ class ProjectResource(ModelResource):
# Force this in an ugly way, at least should do "reverse"
deserialized['users'] = ['/api/v1/user/%s/' % request.user.id]
bundle = self.build_bundle(
data=dict_strip_unicode_keys(deserialized), request=request)
data=dict_strip_unicode_keys(deserialized),
request=request,
)
self.is_valid(bundle)
updated_bundle = self.obj_create(bundle, request=request)
return HttpCreated(location=self.get_resource_uri(updated_bundle))
@ -81,14 +81,20 @@ class ProjectResource(ModelResource):
return [
url(
r'^(?P<resource_name>%s)/schema/$' % self._meta.resource_name,
self.wrap_view('get_schema'), name='api_get_schema'),
self.wrap_view('get_schema'),
name='api_get_schema',
),
url(
r'^(?P<resource_name>%s)/search%s$' %
(self._meta.resource_name, trailing_slash()),
self.wrap_view('get_search'), name='api_get_search'),
url((r'^(?P<resource_name>%s)/(?P<slug>[a-z-_]+)/$') %
self._meta.resource_name, self.wrap_view('dispatch_detail'),
name='api_dispatch_detail'),
self.wrap_view('get_search'),
name='api_get_search',
),
url(
(r'^(?P<resource_name>%s)/(?P<slug>[a-z-_]+)/$') % self._meta.resource_name,
self.wrap_view('dispatch_detail'),
name='api_dispatch_detail',
),
]
@ -98,7 +104,7 @@ class VersionResource(ModelResource):
project = fields.ForeignKey(ProjectResource, 'project', full=True)
class Meta(object):
class Meta:
allowed_methods = ['get', 'put', 'post']
always_return_data = True
queryset = Version.objects.api()
@ -112,7 +118,7 @@ class VersionResource(ModelResource):
def get_object_list(self, request):
self._meta.queryset = Version.objects.api(user=request.user)
return super(VersionResource, self).get_object_list(request)
return super().get_object_list(request)
def build_version(self, request, **kwargs):
project = get_object_or_404(Project, slug=kwargs['project_slug'])
@ -125,17 +131,23 @@ class VersionResource(ModelResource):
return [
url(
r'^(?P<resource_name>%s)/schema/$' % self._meta.resource_name,
self.wrap_view('get_schema'), name='api_get_schema'),
self.wrap_view('get_schema'),
name='api_get_schema',
),
url(
r'^(?P<resource_name>%s)/(?P<project__slug>[a-z-_]+[a-z0-9-_]+)/$' # noqa
% self._meta.resource_name,
self.wrap_view('dispatch_list'),
name='api_version_list'),
url((
r'^(?P<resource_name>%s)/(?P<project_slug>[a-z-_]+[a-z0-9-_]+)/(?P'
r'<version_slug>[a-z0-9-_.]+)/build/$') %
self._meta.resource_name, self.wrap_view('build_version'),
name='api_version_build_slug'),
name='api_version_list',
),
url(
(
r'^(?P<resource_name>%s)/(?P<project_slug>[a-z-_]+[a-z0-9-_]+)/(?P'
r'<version_slug>[a-z0-9-_.]+)/build/$'
) % self._meta.resource_name,
self.wrap_view('build_version'),
name='api_version_build_slug',
),
]
@ -145,7 +157,7 @@ class FileResource(ModelResource):
project = fields.ForeignKey(ProjectResource, 'project', full=True)
class Meta(object):
class Meta:
allowed_methods = ['get', 'post']
queryset = ImportedFile.objects.all()
excludes = ['md5', 'slug']
@ -157,11 +169,15 @@ class FileResource(ModelResource):
return [
url(
r'^(?P<resource_name>%s)/schema/$' % self._meta.resource_name,
self.wrap_view('get_schema'), name='api_get_schema'),
self.wrap_view('get_schema'),
name='api_get_schema',
),
url(
r'^(?P<resource_name>%s)/anchor%s$' %
(self._meta.resource_name, trailing_slash()),
self.wrap_view('get_anchor'), name='api_get_anchor'),
self.wrap_view('get_anchor'),
name='api_get_anchor',
),
]
def get_anchor(self, request, **__):
@ -190,7 +206,7 @@ class UserResource(ModelResource):
"""Read-only API resource for User model."""
class Meta(object):
class Meta:
allowed_methods = ['get']
queryset = User.objects.all()
fields = ['username', 'id']
@ -202,9 +218,12 @@ class UserResource(ModelResource):
return [
url(
r'^(?P<resource_name>%s)/schema/$' % self._meta.resource_name,
self.wrap_view('get_schema'), name='api_get_schema'),
self.wrap_view('get_schema'),
name='api_get_schema',
),
url(
r'^(?P<resource_name>%s)/(?P<username>[a-z-_]+)/$' %
self._meta.resource_name, self.wrap_view('dispatch_detail'),
name='api_dispatch_detail'),
r'^(?P<resource_name>%s)/(?P<username>[a-z-_]+)/$' % self._meta.resource_name,
self.wrap_view('dispatch_detail'),
name='api_dispatch_detail',
),
]


@ -1,16 +1,14 @@
# -*- coding: utf-8 -*-
"""Slumber API client."""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
import logging
from django.conf import settings
import requests
from django.conf import settings
from requests_toolbelt.adapters import host_header_ssl
from slumber import API
log = logging.getLogger(__name__)
PRODUCTION_DOMAIN = getattr(settings, 'PRODUCTION_DOMAIN', 'readthedocs.org')


@ -1,13 +1,13 @@
"""Utility classes for api module"""
from __future__ import absolute_import
# -*- coding: utf-8 -*-
"""Utility classes for api module."""
import logging
from django.utils.translation import ugettext
from tastypie.authentication import BasicAuthentication
from tastypie.authorization import Authorization
from tastypie.resources import ModelResource
from tastypie.exceptions import NotFound
from tastypie.resources import ModelResource
log = logging.getLogger(__name__)
@ -18,14 +18,14 @@ class PostAuthentication(BasicAuthentication):
"""Require HTTP Basic authentication for any method other than GET."""
def is_authenticated(self, request, **kwargs):
val = super(PostAuthentication, self).is_authenticated(request,
**kwargs)
if request.method == "GET":
val = super().is_authenticated(request, **kwargs)
if request.method == 'GET':
return True
return val
class EnhancedModelResource(ModelResource):
def obj_get_list(self, request=None, *_, **kwargs): # noqa
"""
A ORM-specific implementation of ``obj_get_list``.
@ -44,12 +44,16 @@ class EnhancedModelResource(ModelResource):
try:
return self.get_object_list(request).filter(**applicable_filters)
except ValueError as e:
raise NotFound(ugettext("Invalid resource lookup data provided "
"(mismatched type).: %(error)s")
% {'error': e})
raise NotFound(
ugettext(
'Invalid resource lookup data provided '
'(mismatched type).: %(error)s',
) % {'error': e},
)
class OwnerAuthorization(Authorization):
def apply_limits(self, request, object_list):
if request and hasattr(request, 'user') and request.method != 'GET':
if request.user.is_authenticated:


@ -1,10 +1,12 @@
# -*- coding: utf-8 -*-
"""Django admin interface for `~builds.models.Build` and related models."""
from __future__ import absolute_import
from django.contrib import admin
from readthedocs.builds.models import Build, Version, BuildCommandResult
from guardian.admin import GuardedModelAdmin
from readthedocs.builds.models import Build, BuildCommandResult, Version
class BuildCommandResultInline(admin.TabularInline):
model = BuildCommandResult
@ -12,8 +14,25 @@ class BuildCommandResultInline(admin.TabularInline):
class BuildAdmin(admin.ModelAdmin):
fields = ('project', 'version', 'type', 'state', 'error', 'success', 'length', 'cold_storage')
list_display = ('id', 'project', 'version_name', 'success', 'type', 'state', 'date')
fields = (
'project',
'version',
'type',
'state',
'error',
'success',
'length',
'cold_storage',
)
list_display = (
'id',
'project',
'version_name',
'success',
'type',
'state',
'date',
)
list_filter = ('type', 'state', 'success')
list_select_related = ('project', 'version')
raw_id_fields = ('project', 'version')
@ -26,7 +45,14 @@ class BuildAdmin(admin.ModelAdmin):
class VersionAdmin(GuardedModelAdmin):
search_fields = ('slug', 'project__name')
list_display = ('slug', 'type', 'project', 'privacy_level', 'active', 'built')
list_display = (
'slug',
'type',
'project',
'privacy_level',
'active',
'built',
)
list_filter = ('type', 'privacy_level', 'active', 'built')
raw_id_fields = ('project',)


@ -1,8 +1,10 @@
# -*- coding: utf-8 -*-
"""Constants for the builds app."""
from __future__ import absolute_import
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
BUILD_STATE_TRIGGERED = 'triggered'
BUILD_STATE_CLONING = 'cloning'


@ -1,13 +1,7 @@
# -*- coding: utf-8 -*-
"""Django forms for the builds app."""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
from builtins import object
from django import forms
from django.utils.translation import ugettext_lazy as _
@ -17,7 +11,7 @@ from readthedocs.core.utils import trigger_build
class VersionForm(forms.ModelForm):
class Meta(object):
class Meta:
model = Version
fields = ['active', 'privacy_level', 'tags']
@ -26,10 +20,10 @@ class VersionForm(forms.ModelForm):
if self._is_default_version() and not active:
msg = _(
'{version} is the default version of the project, '
'it should be active.'
'it should be active.',
)
raise forms.ValidationError(
msg.format(version=self.instance.verbose_name)
msg.format(version=self.instance.verbose_name),
)
return active
@ -38,7 +32,7 @@ class VersionForm(forms.ModelForm):
return project.default_version == self.instance.slug
def save(self, commit=True):
obj = super(VersionForm, self).save(commit=commit)
obj = super().save(commit=commit)
if obj.active and not obj.built and not obj.uploaded:
trigger_build(project=obj.project, version=obj)
return obj


@ -1,14 +1,23 @@
"""Build and Version class model Managers"""
# -*- coding: utf-8 -*-
from __future__ import absolute_import
"""Build and Version class model Managers."""
from django.db import models
from .constants import (BRANCH, TAG, LATEST, LATEST_VERBOSE_NAME, STABLE,
STABLE_VERBOSE_NAME)
from readthedocs.core.utils.extend import (
SettingsOverrideObject,
get_override_class,
)
from .constants import (
BRANCH,
LATEST,
LATEST_VERBOSE_NAME,
STABLE,
STABLE_VERBOSE_NAME,
TAG,
)
from .querysets import VersionQuerySet
from readthedocs.core.utils.extend import (SettingsOverrideObject,
get_override_class)
__all__ = ['VersionManager']
@ -30,9 +39,9 @@ class VersionManagerBase(models.Manager):
# no direct members.
queryset_class = get_override_class(
VersionQuerySet,
VersionQuerySet._default_class # pylint: disable=protected-access
VersionQuerySet._default_class, # pylint: disable=protected-access
)
return super(VersionManagerBase, cls).from_queryset(queryset_class, class_name)
return super().from_queryset(queryset_class, class_name)
def create_stable(self, **kwargs):
defaults = {


@ -1,10 +1,8 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import models, migrations
import readthedocs.builds.version_slug
import taggit.managers
from django.db import migrations, models
import readthedocs.builds.version_slug
class Migration(migrations.Migration):
@ -77,10 +75,10 @@ class Migration(migrations.Migration):
),
migrations.AlterUniqueTogether(
name='version',
unique_together=set([('project', 'slug')]),
unique_together={('project', 'slug')},
),
migrations.AlterIndexTogether(
name='build',
index_together=set([('version', 'state', 'type')]),
index_together={('version', 'state', 'type')},
),
]


@ -1,8 +1,6 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from __future__ import absolute_import
from django.db import models, migrations
import readthedocs.builds.models


@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2017-10-09 20:14
from __future__ import unicode_literals
from django.db import migrations, models


@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2017-10-27 00:17
from __future__ import unicode_literals
from django.db import migrations


@ -1,8 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-10-17 04:20
from __future__ import unicode_literals
from django.db import migrations, models
import readthedocs.builds.version_slug


@ -1,9 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-11-02 13:24
from __future__ import unicode_literals
from django.db import migrations
import jsonfield.fields
from django.db import migrations
class Migration(migrations.Migration):


@ -1,21 +1,15 @@
# -*- coding: utf-8 -*-
"""Models for the builds app."""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
"""Models for the builds app."""
import logging
import os.path
import re
from shutil import rmtree
from builtins import object
from django.conf import settings
from django.db import models
from django.urls import reverse
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext
@ -23,7 +17,6 @@ from django.utils.translation import ugettext_lazy as _
from guardian.shortcuts import assign
from jsonfield import JSONField
from taggit.managers import TaggableManager
from django.urls import reverse
from readthedocs.core.utils import broadcast
from readthedocs.projects.constants import (
@ -55,8 +48,12 @@ from .utils import (
)
from .version_slug import VersionSlugField
DEFAULT_VERSION_PRIVACY_LEVEL = getattr(
settings, 'DEFAULT_VERSION_PRIVACY_LEVEL', 'public')
settings,
'DEFAULT_VERSION_PRIVACY_LEVEL',
'public',
)
log = logging.getLogger(__name__)
@ -96,7 +93,10 @@ class Version(models.Model):
#: filesystem to determine how the paths for this version are called. It
#: must not be used for any other identifying purposes.
slug = VersionSlugField(
_('Slug'), max_length=255, populate_from='verbose_name')
_('Slug'),
max_length=255,
populate_from='verbose_name',
)
supported = models.BooleanField(_('Supported'), default=True)
active = models.BooleanField(_('Active'), default=False)
@ -114,13 +114,14 @@ class Version(models.Model):
objects = VersionManager.from_queryset(VersionQuerySet)()
class Meta(object):
class Meta:
unique_together = [('project', 'slug')]
ordering = ['-verbose_name']
permissions = (
# Translators: Permission around whether a user can view the
# version
('view_version', _('View Version')),)
('view_version', _('View Version')),
)
def __str__(self):
return ugettext(
@ -128,7 +129,8 @@ class Version(models.Model):
version=self.verbose_name,
project=self.project,
pk=self.pk,
))
),
)
@property
def config(self):
@ -139,9 +141,10 @@ class Version(models.Model):
:rtype: dict
"""
last_build = (
self.builds.filter(state='finished', success=True)
.order_by('-date')
.first()
self.builds.filter(
state='finished',
success=True,
).order_by('-date').first()
)
return last_build.config
@ -184,7 +187,9 @@ class Version(models.Model):
# If we came that far it's not a special version nor a branch or tag.
# Therefore just return the identifier to make a safe guess.
log.debug('TODO: Raise an exception here. Testing what cases it happens')
log.debug(
'TODO: Raise an exception here. Testing what cases it happens',
)
return self.identifier
def get_absolute_url(self):
@ -198,16 +203,21 @@ class Version(models.Model):
)
private = self.privacy_level == PRIVATE
return self.project.get_docs_url(
version_slug=self.slug, private=private)
version_slug=self.slug,
private=private,
)
def save(self, *args, **kwargs): # pylint: disable=arguments-differ
"""Add permissions to the Version for all owners on save."""
from readthedocs.projects import tasks
obj = super(Version, self).save(*args, **kwargs)
obj = super().save(*args, **kwargs)
for owner in self.project.users.all():
assign('view_version', owner, self)
broadcast(
type='app', task=tasks.symlink_project, args=[self.project.pk])
type='app',
task=tasks.symlink_project,
args=[self.project.pk],
)
return obj
def delete(self, *args, **kwargs): # pylint: disable=arguments-differ
@ -219,7 +229,7 @@ class Version(models.Model):
args=[self.get_artifact_paths()],
)
project_pk = self.project.pk
super(Version, self).delete(*args, **kwargs)
super().delete(*args, **kwargs)
broadcast(
type='app',
task=tasks.symlink_project,
@ -253,19 +263,27 @@ class Version(models.Model):
data['PDF'] = project.get_production_media_url('pdf', self.slug)
if project.has_htmlzip(self.slug):
data['HTML'] = project.get_production_media_url(
'htmlzip', self.slug)
'htmlzip',
self.slug,
)
if project.has_epub(self.slug):
data['Epub'] = project.get_production_media_url(
'epub', self.slug)
'epub',
self.slug,
)
else:
if project.has_pdf(self.slug):
data['pdf'] = project.get_production_media_url('pdf', self.slug)
if project.has_htmlzip(self.slug):
data['htmlzip'] = project.get_production_media_url(
'htmlzip', self.slug)
'htmlzip',
self.slug,
)
if project.has_epub(self.slug):
data['epub'] = project.get_production_media_url(
'epub', self.slug)
'epub',
self.slug,
)
return data
def get_conf_py_path(self):
@ -291,9 +309,8 @@ class Version(models.Model):
for type_ in ('pdf', 'epub', 'htmlzip'):
paths.append(
self.project.get_production_media_path(
type_=type_,
version_slug=self.slug),
self.project
.get_production_media_path(type_=type_, version_slug=self.slug),
)
paths.append(self.project.rtd_build_path(version=self.slug))
@ -315,7 +332,12 @@ class Version(models.Model):
log.exception('Build path cleanup failed')
def get_github_url(
self, docroot, filename, source_suffix='.rst', action='view'):
self,
docroot,
filename,
source_suffix='.rst',
action='view',
):
"""
Return a GitHub URL for a given filename.
@ -357,7 +379,12 @@ class Version(models.Model):
)
def get_gitlab_url(
self, docroot, filename, source_suffix='.rst', action='view'):
self,
docroot,
filename,
source_suffix='.rst',
action='view',
):
repo_url = self.project.repo
if 'gitlab' not in repo_url:
return ''
@ -442,7 +469,7 @@ class APIVersion(Version):
del kwargs[key]
except KeyError:
pass
super(APIVersion, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
def save(self, *args, **kwargs):
return 0
@ -454,13 +481,28 @@ class Build(models.Model):
"""Build data."""
project = models.ForeignKey(
Project, verbose_name=_('Project'), related_name='builds')
Project,
verbose_name=_('Project'),
related_name='builds',
)
version = models.ForeignKey(
Version, verbose_name=_('Version'), null=True, related_name='builds')
Version,
verbose_name=_('Version'),
null=True,
related_name='builds',
)
type = models.CharField(
_('Type'), max_length=55, choices=BUILD_TYPES, default='html')
_('Type'),
max_length=55,
choices=BUILD_TYPES,
default='html',
)
state = models.CharField(
_('State'), max_length=55, choices=BUILD_STATE, default='finished')
_('State'),
max_length=55,
choices=BUILD_STATE,
default='finished',
)
date = models.DateTimeField(_('Date'), auto_now_add=True)
success = models.BooleanField(_('Success'), default=True)
@ -470,16 +512,26 @@ class Build(models.Model):
error = models.TextField(_('Error'), default='', blank=True)
exit_code = models.IntegerField(_('Exit code'), null=True, blank=True)
commit = models.CharField(
_('Commit'), max_length=255, null=True, blank=True)
_('Commit'),
max_length=255,
null=True,
blank=True,
)
_config = JSONField(_('Configuration used in the build'), default=dict)
length = models.IntegerField(_('Build Length'), null=True, blank=True)
builder = models.CharField(
_('Builder'), max_length=255, null=True, blank=True)
_('Builder'),
max_length=255,
null=True,
blank=True,
)
cold_storage = models.NullBooleanField(
_('Cold Storage'), help_text='Build steps stored outside the database.')
_('Cold Storage'),
help_text='Build steps stored outside the database.',
)
# Manager
@ -487,13 +539,13 @@ class Build(models.Model):
CONFIG_KEY = '__config'
class Meta(object):
class Meta:
ordering = ['-date']
get_latest_by = 'date'
index_together = [['version', 'state', 'type']]
def __init__(self, *args, **kwargs):
super(Build, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self._config_changed = False
@property
@ -506,14 +558,11 @@ class Build(models.Model):
date = self.date or timezone.now()
if self.project is not None and self.version is not None:
return (
Build.objects
.filter(
Build.objects.filter(
project=self.project,
version=self.version,
date__lt=date,
)
.order_by('-date')
.first()
).order_by('-date').first()
)
return None
@ -523,9 +572,9 @@ class Build(models.Model):
Get the config used for this build.
Since we are saving the config into the JSON field only when it differs
from the previous one, this helper returns the correct JSON used in
this Build object (it could be stored in this object or one of the
previous ones).
from the previous one, this helper returns the correct JSON used in this
Build object (it could be stored in this object or one of the previous
ones).
"""
if self.CONFIG_KEY in self._config:
return Build.objects.get(pk=self._config[self.CONFIG_KEY])._config
@ -553,11 +602,13 @@ class Build(models.Model):
"""
if self.pk is None or self._config_changed:
previous = self.previous
if (previous is not None and
self._config and self._config == previous.config):
if (
previous is not None and self._config and
self._config == previous.config
):
previous_pk = previous._config.get(self.CONFIG_KEY, previous.pk)
self._config = {self.CONFIG_KEY: previous_pk}
super(Build, self).save(*args, **kwargs)
super().save(*args, **kwargs)
self._config_changed = False
def __str__(self):
@ -568,7 +619,8 @@ class Build(models.Model):
self.project.users.all().values_list('username', flat=True),
),
pk=self.pk,
))
),
)
def get_absolute_url(self):
return reverse('builds_detail', args=[self.project.slug, self.pk])
@ -579,7 +631,7 @@ class Build(models.Model):
return self.state == BUILD_STATE_FINISHED
class BuildCommandResultMixin(object):
class BuildCommandResultMixin:
"""
Mixin for common command result methods/properties.
@ -609,7 +661,10 @@ class BuildCommandResult(BuildCommandResultMixin, models.Model):
"""Build command for a ``Build``."""
build = models.ForeignKey(
Build, verbose_name=_('Build'), related_name='commands')
Build,
verbose_name=_('Build'),
related_name='commands',
)
command = models.TextField(_('Command'))
description = models.TextField(_('Description'), blank=True)
@ -619,7 +674,7 @@ class BuildCommandResult(BuildCommandResultMixin, models.Model):
start_time = models.DateTimeField(_('Start time'))
end_time = models.DateTimeField(_('End time'))
class Meta(object):
class Meta:
ordering = ['start_time']
get_latest_by = 'start_time'
@ -628,7 +683,8 @@ class BuildCommandResult(BuildCommandResultMixin, models.Model):
def __str__(self):
return (
ugettext('Build command {pk} for build {build}')
.format(pk=self.pk, build=self.build))
.format(pk=self.pk, build=self.build)
)
@property
def run_time(self):

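The config/save logic shown above de-duplicates identical build configurations: a build whose config matches the previous build's only stores a pointer under '__config', and reads follow that pointer back to the build holding the full dict. A stripped-down sketch of the scheme with plain dicts (not the actual model code):

CONFIG_KEY = '__config'

def resolve_config(build, builds_by_pk):
    # Follow the pointer (if any) to the build that holds the real dict.
    config = build['config']
    if CONFIG_KEY in config:
        return builds_by_pk[config[CONFIG_KEY]]['config']
    return config

def store_config(build, previous, builds_by_pk):
    # Keep only a pointer when the new config equals the previous build's
    # resolved config; otherwise leave the full dict in place.
    if previous and build['config'] and build['config'] == resolve_config(previous, builds_by_pk):
        previous_pk = previous['config'].get(CONFIG_KEY, previous['pk'])
        build['config'] = {CONFIG_KEY: previous_pk}

builds = {1: {'pk': 1, 'config': {'version': 2}}}
new_build = {'pk': 2, 'config': {'version': 2}}
store_config(new_build, builds[1], builds)
builds[2] = new_build
assert resolve_config(new_build, builds) == {'version': 2}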

@ -1,6 +1,6 @@
"""Build and Version QuerySet classes"""
# -*- coding: utf-8 -*-
from __future__ import absolute_import
"""Build and Version QuerySet classes."""
from django.db import models
from guardian.shortcuts import get_objects_for_user
@ -37,7 +37,9 @@ class VersionQuerySetBase(models.QuerySet):
return queryset
def protected(self, user=None, project=None, only_active=True):
queryset = self.filter(privacy_level__in=[constants.PUBLIC, constants.PROTECTED])
queryset = self.filter(
privacy_level__in=[constants.PUBLIC, constants.PROTECTED],
)
if user:
queryset = self._add_user_repos(queryset, user)
if project:
@ -60,10 +62,10 @@ class VersionQuerySetBase(models.QuerySet):
return self.public(user, only_active=False)
def for_project(self, project):
"""Return all versions for a project, including translations"""
"""Return all versions for a project, including translations."""
return self.filter(
models.Q(project=project) |
models.Q(project__main_language_project=project)
models.Q(project__main_language_project=project),
)
@ -119,8 +121,7 @@ class RelatedBuildQuerySetBase(models.QuerySet):
if user.is_authenticated:
user_queryset = get_objects_for_user(user, 'builds.view_version')
pks = user_queryset.values_list('pk', flat=True)
queryset = self.filter(
build__version__pk__in=pks) | queryset
queryset = self.filter(build__version__pk__in=pks,) | queryset
return queryset.distinct()
def public(self, user=None, project=None):


@ -1,6 +1,7 @@
"""Build signals"""
# -*- coding: utf-8 -*-
"""Build signals."""
from __future__ import absolute_import
import django.dispatch


@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""
Classes to copy files between build and web servers.
@ -5,26 +7,23 @@ Classes to copy files between build and web servers.
local machine.
"""
from __future__ import absolute_import
import getpass
import logging
import os
import shutil
from builtins import object
from django.conf import settings
from readthedocs.core.utils.extend import SettingsOverrideObject
from readthedocs.core.utils import safe_makedirs
from readthedocs.core.utils.extend import SettingsOverrideObject
log = logging.getLogger(__name__)
class BaseSyncer(object):
class BaseSyncer:
"""A base object for syncers and pullers"""
"""A base object for syncers and pullers."""
@classmethod
def copy(cls, path, target, is_file=False, **kwargs):
@ -36,7 +35,7 @@ class LocalSyncer(BaseSyncer):
@classmethod
def copy(cls, path, target, is_file=False, **kwargs):
"""A copy command that works with files or directories."""
log.info("Local Copy %s to %s", path, target)
log.info('Local Copy %s to %s', path, target)
if is_file:
if path == target:
# Don't copy the same file over itself
@ -62,28 +61,31 @@ class RemoteSyncer(BaseSyncer):
sync_user = getattr(settings, 'SYNC_USER', getpass.getuser())
app_servers = getattr(settings, 'MULTIPLE_APP_SERVERS', [])
if app_servers:
log.info("Remote Copy %s to %s on %s", path, target, app_servers)
log.info('Remote Copy %s to %s on %s', path, target, app_servers)
for server in app_servers:
mkdir_cmd = ("ssh %s@%s mkdir -p %s" % (sync_user, server, target))
mkdir_cmd = (
'ssh {}@{} mkdir -p {}'.format(sync_user, server, target)
)
ret = os.system(mkdir_cmd)
if ret != 0:
log.debug("Copy error to app servers: cmd=%s", mkdir_cmd)
log.debug('Copy error to app servers: cmd=%s', mkdir_cmd)
if is_file:
slash = ""
slash = ''
else:
slash = "/"
slash = '/'
# Add a slash when copying directories
sync_cmd = (
"rsync -e 'ssh -T' -av --delete {path}{slash} {user}@{server}:{target}"
.format(
"rsync -e 'ssh -T' -av --delete {path}{slash} {user}@{server}:{target}".format(
path=path,
slash=slash,
user=sync_user,
server=server,
target=target))
target=target,
)
)
ret = os.system(sync_cmd)
if ret != 0:
log.debug("Copy error to app servers: cmd=%s", sync_cmd)
log.debug('Copy error to app servers: cmd=%s', sync_cmd)
class DoubleRemotePuller(BaseSyncer):
@ -98,29 +100,32 @@ class DoubleRemotePuller(BaseSyncer):
sync_user = getattr(settings, 'SYNC_USER', getpass.getuser())
app_servers = getattr(settings, 'MULTIPLE_APP_SERVERS', [])
if not is_file:
path += "/"
log.info("Remote Copy %s to %s", path, target)
path += '/'
log.info('Remote Copy %s to %s', path, target)
for server in app_servers:
if not is_file:
mkdir_cmd = "ssh {user}@{server} mkdir -p {target}".format(
user=sync_user, server=server, target=target
mkdir_cmd = 'ssh {user}@{server} mkdir -p {target}'.format(
user=sync_user,
server=server,
target=target,
)
ret = os.system(mkdir_cmd)
if ret != 0:
log.debug("MkDir error to app servers: cmd=%s", mkdir_cmd)
log.debug('MkDir error to app servers: cmd=%s', mkdir_cmd)
# Add a slash when copying directories
sync_cmd = (
"ssh {user}@{server} 'rsync -av "
"--delete --exclude projects {user}@{host}:{path} {target}'"
.format(
"--delete --exclude projects {user}@{host}:{path} {target}'".format(
host=host,
path=path,
user=sync_user,
server=server,
target=target))
target=target,
)
)
ret = os.system(sync_cmd)
if ret != 0:
log.debug("Copy error to app servers: cmd=%s", sync_cmd)
log.debug('Copy error to app servers: cmd=%s', sync_cmd)
class RemotePuller(BaseSyncer):
@ -134,8 +139,8 @@ class RemotePuller(BaseSyncer):
"""
sync_user = getattr(settings, 'SYNC_USER', getpass.getuser())
if not is_file:
path += "/"
log.info("Remote Pull %s to %s", path, target)
path += '/'
log.info('Remote Pull %s to %s', path, target)
if not is_file and not os.path.exists(target):
safe_makedirs(target)
# Add a slash when copying directories
@ -148,7 +153,7 @@ class RemotePuller(BaseSyncer):
ret = os.system(sync_cmd)
if ret != 0:
log.debug(
"Copy error to app servers. Command: [%s] Return: [%s]",
'Copy error to app servers. Command: [%s] Return: [%s]',
sync_cmd,
ret,
)


@ -1,12 +1,11 @@
# -*- coding: utf-8 -*-
"""URL configuration for builds app."""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
"""URL configuration for builds app."""
from django.conf.urls import url
from .views import builds_redirect_detail, builds_redirect_list
urlpatterns = [
url(
r'^(?P<project_slug>[-\w]+)/(?P<pk>\d+)/$',


@ -1,11 +1,12 @@
# -*- coding: utf-8 -*-
"""Utilities for the builds app."""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
from readthedocs.projects.constants import (
BITBUCKET_REGEXS, GITHUB_REGEXS, GITLAB_REGEXS)
BITBUCKET_REGEXS,
GITHUB_REGEXS,
GITLAB_REGEXS,
)
def get_github_username_repo(url):


@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""
Contains logic for handling version slugs.
@ -17,8 +19,6 @@ slug. This is used since using numbers in tags is too common and appending
another number would be confusing.
"""
from __future__ import absolute_import
import math
import re
import string
@ -26,7 +26,6 @@ from operator import truediv
from django.db import models
from django.utils.encoding import force_text
from builtins import range
def get_fields_with_model(cls):
@ -37,12 +36,10 @@ def get_fields_with_model(cls):
prescrived in the Django docs.
https://docs.djangoproject.com/en/1.11/ref/models/meta/#migrating-from-the-old-api
"""
return [
(f, f.model if f.model != cls else None)
for f in cls._meta.get_fields()
if not f.is_relation or f.one_to_one or
(f.many_to_one and f.related_model)
]
return [(f, f.model if f.model != cls else None)
for f in cls._meta.get_fields()
if not f.is_relation or f.one_to_one or
(f.many_to_one and f.related_model)]
# Regex breakdown:
@ -72,7 +69,7 @@ class VersionSlugField(models.CharField):
raise ValueError("missing 'populate_from' argument")
else:
self._populate_from = populate_from
super(VersionSlugField, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
def get_queryset(self, model_cls, slug_field):
# pylint: disable=protected-access
@ -168,7 +165,8 @@ class VersionSlugField(models.CharField):
count += 1
assert self.test_pattern.match(slug), (
'Invalid generated slug: {slug}'.format(slug=slug))
'Invalid generated slug: {slug}'.format(slug=slug)
)
return slug
def pre_save(self, model_instance, add):
@ -180,6 +178,6 @@ class VersionSlugField(models.CharField):
return value
def deconstruct(self):
name, path, args, kwargs = super(VersionSlugField, self).deconstruct()
name, path, args, kwargs = super().deconstruct()
kwargs['populate_from'] = self._populate_from
return name, path, args, kwargs


@ -2,16 +2,9 @@
"""Views for builds app."""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import logging
import textwrap
from builtins import object
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.http import (
@ -23,7 +16,10 @@ from django.shortcuts import get_object_or_404
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.views.generic import DetailView, ListView
from requests.utils import quote
from urllib.parse import urlparse
from readthedocs.doc_builder.exceptions import BuildEnvironmentError
from readthedocs.builds.models import Build, Version
from readthedocs.core.permissions import AdminPermission
from readthedocs.core.utils import trigger_build
@ -33,7 +29,7 @@ from readthedocs.projects.models import Project
log = logging.getLogger(__name__)
class BuildBase(object):
class BuildBase:
model = Build
def get_queryset(self):
@ -43,13 +39,14 @@ class BuildBase(object):
slug=self.project_slug,
)
queryset = Build.objects.public(
user=self.request.user, project=self.project
user=self.request.user,
project=self.project,
)
return queryset
class BuildTriggerMixin(object):
class BuildTriggerMixin:
@method_decorator(login_required)
def post(self, request, project_slug):
@ -65,7 +62,10 @@ class BuildTriggerMixin(object):
slug=version_slug,
)
update_docs_task, build = trigger_build(project=project, version=version)
update_docs_task, build = trigger_build(
project=project,
version=version,
)
if (update_docs_task, build) == (None, None):
# Build was skipped
messages.add_message(
@ -85,15 +85,17 @@ class BuildTriggerMixin(object):
class BuildList(BuildBase, BuildTriggerMixin, ListView):
def get_context_data(self, **kwargs):
context = super(BuildList, self).get_context_data(**kwargs)
context = super().get_context_data(**kwargs)
active_builds = self.get_queryset().exclude(state='finished'
).values('id')
active_builds = self.get_queryset().exclude(
state='finished',
).values('id')
context['project'] = self.project
context['active_builds'] = active_builds
context['versions'] = Version.objects.public(
user=self.request.user, project=self.project
user=self.request.user,
project=self.project,
)
context['build_qs'] = self.get_queryset()
@ -104,8 +106,51 @@ class BuildDetail(BuildBase, DetailView):
pk_url_kwarg = 'build_pk'
def get_context_data(self, **kwargs):
context = super(BuildDetail, self).get_context_data(**kwargs)
context = super().get_context_data(**kwargs)
context['project'] = self.project
build = self.get_object()
if build.error != BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format(build_id=build.pk):
# Do not suggest to open an issue if the error is not generic
return context
scheme = (
'https://github.com/rtfd/readthedocs.org/issues/new'
'?title={title}{build_id}'
'&body={body}'
)
# TODO: we could use ``.github/ISSUE_TEMPLATE.md`` here, but we would
# need to add some variables to it which could impact in the UX when
# filling an issue from the web
body = """
## Details:
* Project URL: https://readthedocs.org/projects/{project_slug}/
* Build URL(if applicable): https://readthedocs.org{build_path}
* Read the Docs username(if applicable): {username}
## Expected Result
*A description of what you wanted to happen*
## Actual Result
*A description of what actually happened*""".format(
project_slug=self.project,
build_path=self.request.path,
username=self.request.user,
)
scheme_dict = {
'title': quote('Build error with build id #'),
'build_id': context['build'].id,
'body': quote(textwrap.dedent(body)),
}
issue_url = scheme.format(**scheme_dict)
issue_url = urlparse(issue_url).geturl()
context['issue_url'] = issue_url
return context
@ -114,11 +159,11 @@ class BuildDetail(BuildBase, DetailView):
def builds_redirect_list(request, project_slug): # pylint: disable=unused-argument
return HttpResponsePermanentRedirect(
reverse('builds_project_list', args=[project_slug])
reverse('builds_project_list', args=[project_slug]),
)
def builds_redirect_detail(request, project_slug, pk): # pylint: disable=unused-argument
return HttpResponsePermanentRedirect(
reverse('builds_detail', args=[project_slug, pk])
reverse('builds_detail', args=[project_slug, pk]),
)


@ -1,2 +1,5 @@
# -*- coding: utf-8 -*-
"""Logic to parse and validate ``readthedocs.yaml`` file."""
from .config import * # noqa
from .parser import * # noqa


@ -1,20 +1,27 @@
# -*- coding: utf-8 -*-
# pylint: disable=too-many-lines
"""Build configuration for rtd."""
from __future__ import division, print_function, unicode_literals
import copy
import os
import re
from contextlib import contextmanager
import six
from django.conf import settings
from readthedocs.config.utils import list_to_dict, to_dict
from readthedocs.projects.constants import DOCUMENTATION_CHOICES
from .find import find_one
from .models import Build, Conda, Mkdocs, Python, Sphinx, Submodules
from .models import (
Build,
Conda,
Mkdocs,
Python,
PythonInstall,
PythonInstallRequirements,
Sphinx,
Submodules,
)
from .parser import ParseError, parse
from .validation import (
VALUE_NOT_FOUND,
@ -28,6 +35,7 @@ from .validation import (
validate_string,
)
__all__ = (
'ALL',
'load',
@ -36,47 +44,35 @@ __all__ = (
'ConfigError',
'ConfigOptionNotSupportedError',
'InvalidConfig',
'PIP',
'SETUPTOOLS',
)
ALL = 'all'
PIP = 'pip'
SETUPTOOLS = 'setuptools'
CONFIG_FILENAME_REGEX = r'^\.?readthedocs.ya?ml$'
CONFIG_NOT_SUPPORTED = 'config-not-supported'
VERSION_INVALID = 'version-invalid'
BASE_INVALID = 'base-invalid'
BASE_NOT_A_DIR = 'base-not-a-directory'
CONFIG_SYNTAX_INVALID = 'config-syntax-invalid'
CONFIG_REQUIRED = 'config-required'
NAME_REQUIRED = 'name-required'
NAME_INVALID = 'name-invalid'
CONF_FILE_REQUIRED = 'conf-file-required'
PYTHON_INVALID = 'python-invalid'
SUBMODULES_INVALID = 'submodules-invalid'
INVALID_KEYS_COMBINATION = 'invalid-keys-combination'
INVALID_KEY = 'invalid-key'
DOCKER_DEFAULT_IMAGE = 'readthedocs/build'
DOCKER_DEFAULT_VERSION = '2.0'
DOCKER_DEFAULT_IMAGE = getattr(settings, 'DOCKER_DEFAULT_IMAGE', 'readthedocs/build')
DOCKER_DEFAULT_VERSION = getattr(settings, 'DOCKER_DEFAULT_VERSION', '2.0')
# These map to corresponding settings in the .org,
# so they haven't been renamed.
DOCKER_IMAGE = '{}:{}'.format(DOCKER_DEFAULT_IMAGE, DOCKER_DEFAULT_VERSION)
DOCKER_IMAGE_SETTINGS = {
'readthedocs/build:1.0': {
'python': {'supported_versions': [2, 2.7, 3, 3.4]},
},
'readthedocs/build:2.0': {
'python': {'supported_versions': [2, 2.7, 3, 3.5]},
},
'readthedocs/build:3.0': {
'python': {'supported_versions': [2, 2.7, 3, 3.3, 3.4, 3.5, 3.6]},
},
'readthedocs/build:stable': {
'python': {'supported_versions': [2, 2.7, 3, 3.3, 3.4, 3.5, 3.6]},
},
'readthedocs/build:latest': {
'python': {'supported_versions': [2, 2.7, 3, 3.3, 3.4, 3.5, 3.6]},
},
}
DOCKER_IMAGE = getattr(
settings,
'DOCKER_IMAGE',
'{}:{}'.format(DOCKER_DEFAULT_IMAGE, DOCKER_DEFAULT_VERSION)
)
DOCKER_IMAGE_SETTINGS = getattr(settings, 'DOCKER_IMAGE_SETTINGS', {})
class ConfigError(Exception):
@ -85,7 +81,7 @@ class ConfigError(Exception):
def __init__(self, message, code):
self.code = code
super(ConfigError, self).__init__(message)
super().__init__(message)
class ConfigOptionNotSupportedError(ConfigError):
@ -97,9 +93,9 @@ class ConfigOptionNotSupportedError(ConfigError):
template = (
'The "{}" configuration option is not supported in this version'
)
super(ConfigOptionNotSupportedError, self).__init__(
super().__init__(
template.format(self.configuration),
CONFIG_NOT_SUPPORTED
CONFIG_NOT_SUPPORTED,
)
@ -118,10 +114,10 @@ class InvalidConfig(ConfigError):
code=code,
error=error_message,
)
super(InvalidConfig, self).__init__(message, code=code)
super().__init__(message, code=code)
class BuildConfigBase(object):
class BuildConfigBase:
"""
Config that handles the build of one particular documentation.
@ -140,15 +136,21 @@ class BuildConfigBase(object):
"""
PUBLIC_ATTRIBUTES = [
'version', 'formats', 'python',
'conda', 'build', 'doctype',
'sphinx', 'mkdocs', 'submodules',
'version',
'formats',
'python',
'conda',
'build',
'doctype',
'sphinx',
'mkdocs',
'submodules',
]
version = None
def __init__(self, env_config, raw_config, source_file):
self.env_config = env_config
self.raw_config = raw_config
self.raw_config = copy.deepcopy(raw_config)
self.source_file = source_file
if os.path.isdir(self.source_file):
self.base_path = self.source_file
@ -229,7 +231,7 @@ class BuildConfigBase(object):
@property
def python_interpreter(self):
ver = self.python_full_version
return 'python{0}'.format(ver)
return 'python{}'.format(ver)
@property
def python_full_version(self):
@ -248,10 +250,7 @@ class BuildConfigBase(object):
config = {}
for name in self.PUBLIC_ATTRIBUTES:
attr = getattr(self, name)
if hasattr(attr, '_asdict'):
config[name] = attr._asdict()
else:
config[name] = attr
config[name] = to_dict(attr)
return config
def __getattr__(self, name):
@ -263,12 +262,6 @@ class BuildConfigV1(BuildConfigBase):
"""Version 1 of the configuration file."""
BASE_INVALID_MESSAGE = 'Invalid value for base: {base}'
BASE_NOT_A_DIR_MESSAGE = '"base" is not a directory: {base}'
NAME_REQUIRED_MESSAGE = 'Missing key "name"'
NAME_INVALID_MESSAGE = (
'Invalid name "{name}". Valid values must match {name_re}'
)
CONF_FILE_REQUIRED_MESSAGE = 'Missing key "conf_file"'
PYTHON_INVALID_MESSAGE = '"python" section must be a mapping.'
PYTHON_EXTRA_REQUIREMENTS_INVALID_MESSAGE = (
@ -306,63 +299,17 @@ class BuildConfigV1(BuildConfigBase):
``readthedocs.yml`` config file if not set
"""
# Validate env_config.
# TODO: this isn't used
self._config['output_base'] = self.validate_output_base()
# Validate the build environment first
# Must happen before `validate_python`!
self._config['build'] = self.validate_build()
# Validate raw_config. Order matters.
# TODO: this isn't used
self._config['name'] = self.validate_name()
# TODO: this isn't used
self._config['base'] = self.validate_base()
self._config['python'] = self.validate_python()
self._config['formats'] = self.validate_formats()
self._config['conda'] = self.validate_conda()
self._config['requirements_file'] = self.validate_requirements_file()
def validate_output_base(self):
"""Validates that ``output_base`` exists and set its absolute path."""
assert 'output_base' in self.env_config, (
'"output_base" required in "env_config"')
output_base = os.path.abspath(
os.path.join(
self.env_config.get('output_base', self.base_path),
)
)
return output_base
def validate_name(self):
"""Validates that name exists."""
name = self.raw_config.get('name', None)
if not name:
name = self.env_config.get('name', None)
if not name:
self.error('name', self.NAME_REQUIRED_MESSAGE, code=NAME_REQUIRED)
name_re = r'^[-_.0-9a-zA-Z]+$'
if not re.match(name_re, name):
self.error(
'name',
self.NAME_INVALID_MESSAGE.format(
name=name,
name_re=name_re),
code=NAME_INVALID)
return name
def validate_base(self):
"""Validates that path is a valid directory."""
if 'base' in self.raw_config:
base = self.raw_config['base']
else:
base = self.base_path
with self.catch_validation_error('base'):
base = validate_directory(base, self.base_path)
return base
def validate_build(self):
"""
Validate the build config settings.
@ -398,13 +345,11 @@ class BuildConfigV1(BuildConfigBase):
# Prepend proper image name to user's image name
build['image'] = '{}:{}'.format(
DOCKER_DEFAULT_IMAGE,
build['image']
build['image'],
)
# Update docker default settings from image name
if build['image'] in DOCKER_IMAGE_SETTINGS:
self.env_config.update(
DOCKER_IMAGE_SETTINGS[build['image']]
)
self.env_config.update(DOCKER_IMAGE_SETTINGS[build['image']])
# Allow to override specific project
config_image = self.defaults.get('build_image')
@ -431,20 +376,22 @@ class BuildConfigV1(BuildConfigBase):
self.error(
'python',
self.PYTHON_INVALID_MESSAGE,
code=PYTHON_INVALID)
code=PYTHON_INVALID,
)
# Validate use_system_site_packages.
if 'use_system_site_packages' in raw_python:
with self.catch_validation_error(
'python.use_system_site_packages'):
with self.catch_validation_error('python.use_system_site_packages'):
python['use_system_site_packages'] = validate_bool(
raw_python['use_system_site_packages'])
raw_python['use_system_site_packages'],
)
# Validate pip_install.
if 'pip_install' in raw_python:
with self.catch_validation_error('python.pip_install'):
python['install_with_pip'] = validate_bool(
raw_python['pip_install'])
raw_python['pip_install'],
)
# Validate extra_requirements.
if 'extra_requirements' in raw_python:
@ -453,29 +400,30 @@ class BuildConfigV1(BuildConfigBase):
self.error(
'python.extra_requirements',
self.PYTHON_EXTRA_REQUIREMENTS_INVALID_MESSAGE,
code=PYTHON_INVALID)
code=PYTHON_INVALID,
)
if not python['install_with_pip']:
python['extra_requirements'] = []
else:
for extra_name in raw_extra_requirements:
with self.catch_validation_error(
'python.extra_requirements'):
with self.catch_validation_error('python.extra_requirements'):
python['extra_requirements'].append(
validate_string(extra_name)
validate_string(extra_name),
)
# Validate setup_py_install.
if 'setup_py_install' in raw_python:
with self.catch_validation_error('python.setup_py_install'):
python['install_with_setup'] = validate_bool(
raw_python['setup_py_install'])
raw_python['setup_py_install'],
)
if 'version' in raw_python:
with self.catch_validation_error('python.version'):
# Try to convert strings to an int first, to catch '2', then
# a float, to catch '2.7'
version = raw_python['version']
if isinstance(version, six.string_types):
if isinstance(version, str):
try:
version = int(version)
except ValueError:
@ -502,7 +450,8 @@ class BuildConfigV1(BuildConfigBase):
if 'file' in raw_conda:
with self.catch_validation_error('conda.file'):
conda_environment = validate_file(
raw_conda['file'], self.base_path
raw_conda['file'],
self.base_path,
)
conda['environment'] = conda_environment
@ -518,7 +467,9 @@ class BuildConfigV1(BuildConfigBase):
if not requirements_file:
return None
with self.catch_validation_error('requirements_file'):
validate_file(requirements_file, self.base_path)
requirements_file = validate_file(
requirements_file, self.base_path
)
return requirements_file
def validate_formats(self):
@ -536,21 +487,6 @@ class BuildConfigV1(BuildConfigBase):
return formats
@property
def name(self):
"""The project name."""
return self._config['name']
@property
def base(self):
"""The base directory."""
return self._config['base']
@property
def output_base(self):
"""The output base."""
return self._config['output_base']
@property
def formats(self):
"""The documentation formats to be built."""
@ -559,9 +495,39 @@ class BuildConfigV1(BuildConfigBase):
@property
def python(self):
"""Python related configuration."""
python = self._config['python']
requirements = self._config['requirements_file']
self._config['python']['requirements'] = requirements
return Python(**self._config['python'])
python_install = []
# Always append a `PythonInstallRequirements` option.
# If requirements is None, rtd will try to find a requirements file.
python_install.append(
PythonInstallRequirements(
requirements=requirements,
)
)
if python['install_with_pip']:
python_install.append(
PythonInstall(
path=self.base_path,
method=PIP,
extra_requirements=python['extra_requirements'],
)
)
elif python['install_with_setup']:
python_install.append(
PythonInstall(
path=self.base_path,
method=SETUPTOOLS,
extra_requirements=[],
)
)
return Python(
version=python['version'],
install=python_install,
use_system_site_packages=python['use_system_site_packages'],
)
@property
def conda(self):
@ -613,7 +579,7 @@ class BuildConfigV2(BuildConfigBase):
valid_formats = ['htmlzip', 'pdf', 'epub']
valid_build_images = ['1.0', '2.0', '3.0', 'stable', 'latest']
default_build_image = 'latest'
valid_install_options = ['pip', 'setup.py']
valid_install_method = [PIP, SETUPTOOLS]
valid_sphinx_builders = {
'html': 'sphinx',
'htmldir': 'sphinx_htmldir',
@ -723,7 +689,7 @@ class BuildConfigV2(BuildConfigBase):
python = {}
with self.catch_validation_error('python.version'):
version = self.pop_config('python.version', 3)
if isinstance(version, six.string_types):
if isinstance(version, str):
try:
version = int(version)
except ValueError:
@ -736,38 +702,22 @@ class BuildConfigV2(BuildConfigBase):
self.get_valid_python_versions(),
)
with self.catch_validation_error('python.requirements'):
requirements = self.defaults.get('requirements_file')
requirements = self.pop_config('python.requirements', requirements)
if requirements != '' and requirements is not None:
requirements = validate_file(requirements, self.base_path)
python['requirements'] = requirements
with self.catch_validation_error('python.install'):
install = (
'setup.py' if self.defaults.get('install_project') else None
)
install = self.pop_config('python.install', install)
if install is not None:
validate_choice(install, self.valid_install_options)
python['install_with_setup'] = install == 'setup.py'
python['install_with_pip'] = install == 'pip'
with self.catch_validation_error('python.extra_requirements'):
extra_requirements = self.pop_config(
'python.extra_requirements', []
)
extra_requirements = validate_list(extra_requirements)
if extra_requirements and not python['install_with_pip']:
self.error(
'python.extra_requirements',
'You need to install your project with pip '
'to use extra_requirements',
code=PYTHON_INVALID,
raw_install = self.raw_config.get('python', {}).get('install', [])
validate_list(raw_install)
if raw_install:
# Transform to a dict, so it's easy to validate extra keys.
self.raw_config.setdefault('python', {})['install'] = (
list_to_dict(raw_install)
)
python['extra_requirements'] = [
validate_string(extra) for extra in extra_requirements
]
else:
self.pop_config('python.install')
raw_install = self.raw_config.get('python', {}).get('install', [])
python['install'] = [
self.validate_python_install(index)
for index in range(len(raw_install))
]
with self.catch_validation_error('python.system_packages'):
system_packages = self.defaults.get(
@ -782,6 +732,60 @@ class BuildConfigV2(BuildConfigBase):
return python
def validate_python_install(self, index):
"""Validates the python.install.{index} key."""
python_install = {}
key = 'python.install.{}'.format(index)
raw_install = self.raw_config['python']['install'][str(index)]
with self.catch_validation_error(key):
validate_dict(raw_install)
if 'requirements' in raw_install:
requirements_key = key + '.requirements'
with self.catch_validation_error(requirements_key):
requirements = validate_file(
self.pop_config(requirements_key),
self.base_path
)
python_install['requirements'] = requirements
elif 'path' in raw_install:
path_key = key + '.path'
with self.catch_validation_error(path_key):
path = validate_directory(
self.pop_config(path_key),
self.base_path
)
python_install['path'] = path
method_key = key + '.method'
with self.catch_validation_error(method_key):
method = validate_choice(
self.pop_config(method_key, PIP),
self.valid_install_method
)
python_install['method'] = method
extra_req_key = key + '.extra_requirements'
with self.catch_validation_error(extra_req_key):
extra_requirements = validate_list(
self.pop_config(extra_req_key, [])
)
if extra_requirements and python_install['method'] != PIP:
self.error(
extra_req_key,
'You need to install your project with pip '
'to use extra_requirements',
code=PYTHON_INVALID,
)
python_install['extra_requirements'] = extra_requirements
else:
self.error(
key,
'"path" or "requirements" key is required',
code=CONFIG_REQUIRED,
)
return python_install
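# Illustrative sketch (not part of the commit), assuming the raw
# ``python.install`` list was already converted with ``list_to_dict``:
#
#     raw_config['python']['install'] == {
#         '0': {'requirements': 'docs/requirements.txt'},
#         '1': {'path': '.', 'method': 'pip', 'extra_requirements': ['docs']},
#     }
#
#     validate_python_install(0)
#     # -> {'requirements': <validated path to docs/requirements.txt>}
#     validate_python_install(1)
#     # -> {'path': <validated directory>, 'method': PIP,
#     #     'extra_requirements': ['docs']}
#
# File and directory names are made up and must exist in the checkout,
# since they are validated against ``self.base_path``.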
def get_valid_python_versions(self):
"""
Get the valid python versions for the current docker image.
@ -873,7 +877,8 @@ class BuildConfigV2(BuildConfigBase):
if not configuration:
configuration = None
configuration = self.pop_config(
'sphinx.configuration', configuration
'sphinx.configuration',
configuration,
)
if configuration is not None:
configuration = validate_file(configuration, self.base_path)
@ -889,9 +894,8 @@ class BuildConfigV2(BuildConfigBase):
"""
Validates that the doctype is the same as the admin panel.
This a temporal validation, as the configuration file
should support per version doctype, but we need to
adapt the rtd code for that.
This is a temporary validation, as the configuration file should support a
per-version doctype, but we need to adapt the rtd code for that.
"""
dashboard_doctype = self.defaults.get('doctype', 'sphinx')
if self.doctype != dashboard_doctype:
@ -901,7 +905,7 @@ class BuildConfigV2(BuildConfigBase):
if dashboard_doctype == 'mkdocs' or not self.sphinx:
error_msg += ' but there is no "{}" key specified.'.format(
'mkdocs' if dashboard_doctype == 'mkdocs' else 'sphinx'
'mkdocs' if dashboard_doctype == 'mkdocs' else 'sphinx',
)
else:
error_msg += ' but your "sphinx.builder" key does not match.'
@ -963,8 +967,8 @@ class BuildConfigV2(BuildConfigBase):
"""
Checks that we don't have extra keys (invalid ones).
This should be called after all the validations are done
and all keys are popped from `self.raw_config`.
This should be called after all the validations are done and all keys
are popped from `self.raw_config`.
"""
msg = (
'Invalid configuration option: {}. '
@ -1018,7 +1022,22 @@ class BuildConfigV2(BuildConfigBase):
@property
def python(self):
return Python(**self._config['python'])
python_install = []
python = self._config['python']
for install in python['install']:
if 'requirements' in install:
python_install.append(
PythonInstallRequirements(**install)
)
elif 'path' in install:
python_install.append(
PythonInstall(**install)
)
return Python(
version=python['version'],
install=python_install,
use_system_site_packages=python['use_system_site_packages'],
)
@property
def sphinx(self):
@ -1054,10 +1073,7 @@ def load(path, env_config):
filename = find_one(path, CONFIG_FILENAME_REGEX)
if not filename:
raise ConfigError(
'No configuration file found',
code=CONFIG_REQUIRED
)
raise ConfigError('No configuration file found', code=CONFIG_REQUIRED)
with open(filename, 'r') as configuration_file:
try:
config = parse(configuration_file.read())

View File

@ -1,6 +1,6 @@
"""Helper functions to search files."""
# -*- coding: utf-8 -*-
from __future__ import division, print_function, unicode_literals
"""Helper functions to search files."""
import os
import re

View File

@ -1,37 +1,68 @@
# -*- coding: utf-8 -*-
"""Models for the response of the configuration object."""
from __future__ import division, print_function, unicode_literals
from collections import namedtuple
from readthedocs.config.utils import to_dict
Build = namedtuple('Build', ['image']) # noqa
class Base(object):
Python = namedtuple( # noqa
'Python',
[
'version',
'requirements',
'install_with_pip',
'install_with_setup',
'extra_requirements',
'use_system_site_packages',
],
)
"""
Base class for every configuration.
Conda = namedtuple('Conda', ['environment']) # noqa
Each inherited class should define
its attributes in the `__slots__` attribute.
Sphinx = namedtuple( # noqa
'Sphinx',
['builder', 'configuration', 'fail_on_warning'],
)
We are using `__slots__` so we can't add more attributes by mistake;
this is similar to a namedtuple.
"""
Mkdocs = namedtuple( # noqa
'Mkdocs',
['configuration', 'fail_on_warning'],
)
def __init__(self, **kwargs):
for name in self.__slots__:
setattr(self, name, kwargs[name])
Submodules = namedtuple( # noqa
'Submodules',
['include', 'exclude', 'recursive'],
)
def as_dict(self):
return {
name: to_dict(getattr(self, name))
for name in self.__slots__
}
class Build(Base):
__slots__ = ('image',)
class Python(Base):
__slots__ = ('version', 'install', 'use_system_site_packages')
class PythonInstallRequirements(Base):
__slots__ = ('requirements',)
class PythonInstall(Base):
__slots__ = ('path', 'method', 'extra_requirements',)
class Conda(Base):
__slots__ = ('environment',)
class Sphinx(Base):
__slots__ = ('builder', 'configuration', 'fail_on_warning')
class Mkdocs(Base):
__slots__ = ('configuration', 'fail_on_warning')
class Submodules(Base):
__slots__ = ('include', 'exclude', 'recursive')
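A minimal usage sketch of the new ``__slots__``-based models (illustrative, not part of the commit; all values are made up):
python = Python(
    version=3,
    install=[PythonInstallRequirements(requirements='requirements.txt')],
    use_system_site_packages=False,
)
python.as_dict()
# -> {'version': 3,
#     'install': [{'requirements': 'requirements.txt'}],
#     'use_system_site_packages': False}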

View File

@ -1,10 +1,10 @@
# -*- coding: utf-8 -*-
"""YAML parser for the RTD configuration file."""
from __future__ import division, print_function, unicode_literals
import yaml
__all__ = ('parse', 'ParseError')
@ -12,8 +12,6 @@ class ParseError(Exception):
"""Parser related errors."""
pass
def parse(stream):
"""

File diff suppressed because it is too large

View File

@ -1,8 +1,6 @@
from __future__ import division, print_function, unicode_literals
# -*- coding: utf-8 -*-
import os
import pytest
import six
from readthedocs.config.find import find_one
from .utils import apply_fs
@ -19,16 +17,3 @@ def test_find_at_root(tmpdir):
base = str(tmpdir)
path = find_one(base, r'readthedocs\.yml')
assert path == os.path.abspath(os.path.join(base, 'readthedocs.yml'))
@pytest.mark.skipif(not six.PY2, reason='Only for python2')
def test_find_unicode_path(tmpdir):
base_path = os.path.abspath(
os.path.join(os.path.dirname(__file__), 'fixtures/bad_encode_project')
)
path = find_one(base_path, r'readthedocs\.yml')
assert path == ''
unicode_base_path = base_path.decode('utf-8')
assert isinstance(unicode_base_path, unicode)
path = find_one(unicode_base_path, r'readthedocs\.yml')
assert path == ''

View File

@ -1,5 +1,4 @@
from __future__ import division, print_function, unicode_literals
# -*- coding: utf-8 -*-
from io import StringIO
from pytest import raises
@ -8,63 +7,64 @@ from readthedocs.config.parser import ParseError, parse
def test_parse_empty_config_file():
buf = StringIO(u'')
buf = StringIO('')
with raises(ParseError):
parse(buf)
def test_parse_invalid_yaml():
buf = StringIO(u'- - !asdf')
buf = StringIO('- - !asdf')
with raises(ParseError):
parse(buf)
def test_parse_bad_type():
buf = StringIO(u'Hello')
buf = StringIO('Hello')
with raises(ParseError):
parse(buf)
def test_parse_single_config():
buf = StringIO(u'base: path')
buf = StringIO('base: path')
config = parse(buf)
assert isinstance(config, dict)
assert config['base'] == 'path'
def test_parse_null_value():
buf = StringIO(u'base: null')
buf = StringIO('base: null')
config = parse(buf)
assert config['base'] is None
def test_parse_empty_value():
buf = StringIO(u'base:')
buf = StringIO('base:')
config = parse(buf)
assert config['base'] is None
def test_parse_empty_string_value():
buf = StringIO(u'base: ""')
buf = StringIO('base: ""')
config = parse(buf)
assert config['base'] == ''
def test_parse_empty_list():
buf = StringIO(u'base: []')
buf = StringIO('base: []')
config = parse(buf)
assert config['base'] == []
def test_do_not_parse_multiple_configs_in_one_file():
buf = StringIO(
u'''
'''
base: path
---
base: other_path
name: second
nested:
works: true
''')
'''
)
with raises(ParseError):
parse(buf)

View File

@ -1,5 +1,4 @@
from __future__ import division, print_function, unicode_literals
# -*- coding: utf-8 -*-
from .utils import apply_fs

View File

@ -1,20 +1,29 @@
# -*- coding: utf-8 -*-
from __future__ import division, print_function, unicode_literals
import os
from mock import patch
from pytest import raises
from six import text_type
from readthedocs.config.validation import (
INVALID_BOOL, INVALID_CHOICE, INVALID_DIRECTORY, INVALID_FILE, INVALID_LIST,
INVALID_PATH, INVALID_STRING, ValidationError, validate_bool,
validate_choice, validate_directory, validate_file, validate_list,
validate_path, validate_string)
INVALID_BOOL,
INVALID_CHOICE,
INVALID_DIRECTORY,
INVALID_FILE,
INVALID_LIST,
INVALID_PATH,
INVALID_STRING,
ValidationError,
validate_bool,
validate_choice,
validate_directory,
validate_file,
validate_list,
validate_path,
validate_string,
)
class TestValidateBool(object):
class TestValidateBool:
def test_it_accepts_true(self):
assert validate_bool(True) is True
@ -33,7 +42,7 @@ class TestValidateBool(object):
assert excinfo.value.code == INVALID_BOOL
class TestValidateChoice(object):
class TestValidateChoice:
def test_it_accepts_valid_choice(self):
result = validate_choice('choice', ('choice', 'another_choice'))
@ -49,7 +58,7 @@ class TestValidateChoice(object):
assert excinfo.value.code == INVALID_CHOICE
class TestValidateList(object):
class TestValidateList:
def test_it_accepts_list_types(self):
result = validate_list(['choice', 'another_choice'])
@ -70,16 +79,16 @@ class TestValidateList(object):
def test_it_rejects_string_types(self):
with raises(ValidationError) as excinfo:
result = validate_list('choice')
validate_list('choice')
assert excinfo.value.code == INVALID_LIST
class TestValidateDirectory(object):
class TestValidateDirectory:
def test_it_uses_validate_path(self, tmpdir):
patcher = patch('readthedocs.config.validation.validate_path')
with patcher as validate_path:
path = text_type(tmpdir.mkdir('a directory'))
path = str(tmpdir.mkdir('a directory'))
validate_path.return_value = path
validate_directory(path, str(tmpdir))
validate_path.assert_called_with(path, str(tmpdir))
@ -91,7 +100,7 @@ class TestValidateDirectory(object):
assert excinfo.value.code == INVALID_DIRECTORY
class TestValidateFile(object):
class TestValidateFile:
def test_it_uses_validate_path(self, tmpdir):
patcher = patch('readthedocs.config.validation.validate_path')
@ -110,7 +119,7 @@ class TestValidateFile(object):
assert excinfo.value.code == INVALID_FILE
class TestValidatePath(object):
class TestValidatePath:
def test_it_accepts_relative_path(self, tmpdir):
tmpdir.mkdir('a directory')
@ -140,15 +149,15 @@ class TestValidatePath(object):
assert excinfo.value.code == INVALID_PATH
class TestValidateString(object):
class TestValidateString:
def test_it_accepts_unicode(self):
result = validate_string(u'Unicöde')
assert isinstance(result, text_type)
result = validate_string('Unicöde')
assert isinstance(result, str)
def test_it_accepts_nonunicode(self):
result = validate_string('Unicode')
assert isinstance(result, text_type)
assert isinstance(result, str)
def test_it_rejects_float(self):
with raises(ValidationError) as excinfo:

View File

@ -1,11 +1,11 @@
from __future__ import division, print_function, unicode_literals
# -*- coding: utf-8 -*-
def apply_fs(tmpdir, contents):
"""
Create the directory structure specified in ``contents``. It's a dict of
filenames as keys and the file contents as values. If the value is another
dict, it's a subdirectory.
Create the directory structure specified in ``contents``.
It's a dict of filenames as keys and the file contents as values. If the
value is another dict, it's a subdirectory.
"""
for filename, content in contents.items():
if hasattr(content, 'items'):
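For example (illustrative, not part of the commit), a test could build a small checkout like this, where ``tmpdir`` is the pytest fixture, nested dicts become directories, and string values become file contents:
apply_fs(tmpdir, {
    'readthedocs.yml': 'python:\n  version: 3\n',
    'docs': {
        'conf.py': '',
    },
})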

View File

@ -0,0 +1,27 @@
"""Shared functions for the config module."""
def to_dict(value):
"""Recursively transform a class from `config.models` to a dict."""
if hasattr(value, 'as_dict'):
return value.as_dict()
if isinstance(value, list):
return [
to_dict(v)
for v in value
]
if isinstance(value, dict):
return {
k: to_dict(v)
for k, v in value.items()
}
return value
def list_to_dict(list_):
"""Transform a list to a dictionary with its indices as keys."""
dict_ = {
str(i): element
for i, element in enumerate(list_)
}
return dict_
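A quick sketch of both helpers (illustrative, not part of the commit; ``Build`` is just one example of a ``config.models`` class and its image value is made up):
list_to_dict(['html', 'pdf'])
# -> {'0': 'html', '1': 'pdf'}
to_dict([Build(image='readthedocs/build:latest'), {'enabled': True}])
# -> [{'image': 'readthedocs/build:latest'}, {'enabled': True}]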

View File

@ -1,9 +1,8 @@
"""Validations for the RTD configuration file."""
from __future__ import division, print_function, unicode_literals
# -*- coding: utf-8 -*-
"""Validations for the RTD configuration file."""
import os
from six import string_types, text_type
INVALID_BOOL = 'invalid-bool'
INVALID_CHOICE = 'invalid-choice'
@ -29,7 +28,7 @@ class ValidationError(Exception):
INVALID_PATH: 'path {value} does not exist',
INVALID_STRING: 'expected string',
INVALID_LIST: 'expected list',
VALUE_NOT_FOUND: '{value} not found'
VALUE_NOT_FOUND: '{value} not found',
}
def __init__(self, value, code, format_kwargs=None):
@ -41,12 +40,12 @@ class ValidationError(Exception):
if format_kwargs is not None:
defaults.update(format_kwargs)
message = self.messages[code].format(**defaults)
super(ValidationError, self).__init__(message)
super().__init__(message)
def validate_list(value):
"""Check if ``value`` is an iterable."""
if isinstance(value, (dict, string_types)):
if isinstance(value, (dict, str)):
raise ValidationError(value, INVALID_LIST)
if not hasattr(value, '__iter__'):
raise ValidationError(value, INVALID_LIST)
@ -63,9 +62,13 @@ def validate_choice(value, choices):
"""Check that ``value`` is in ``choices``."""
choices = validate_list(choices)
if value not in choices:
raise ValidationError(value, INVALID_CHOICE, {
'choices': ', '.join(map(str, choices))
})
raise ValidationError(
value,
INVALID_CHOICE,
{
'choices': ', '.join(map(str, choices)),
},
)
return value
@ -113,6 +116,6 @@ def validate_path(value, base_path):
def validate_string(value):
"""Check that ``value`` is a string type."""
if not isinstance(value, string_types):
if not isinstance(value, str):
raise ValidationError(value, INVALID_STRING)
return text_type(value)
return str(value)

View File

@ -1,6 +1,7 @@
"""Common constants"""
# -*- coding: utf-8 -*-
"""Common constants."""
from __future__ import absolute_import
from readthedocs.builds.version_slug import VERSION_SLUG_REGEX
from readthedocs.projects.constants import LANGUAGES_REGEX, PROJECT_SLUG_REGEX

View File

@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""App initialization."""
default_app_config = 'readthedocs.core.apps.CoreAppConfig'

View File

@ -1,6 +1,7 @@
"""Allauth overrides"""
# -*- coding: utf-8 -*-
"""Allauth overrides."""
from __future__ import absolute_import
import json
import logging
@ -9,6 +10,7 @@ from django.template.loader import render_to_string
from readthedocs.core.utils import send_email
try:
from django.utils.encoding import force_text
except ImportError:
@ -19,16 +21,17 @@ log = logging.getLogger(__name__)
class AccountAdapter(DefaultAccountAdapter):
"""Customize Allauth emails to match our current patterns"""
"""Customize Allauth emails to match our current patterns."""
def format_email_subject(self, subject):
return force_text(subject)
def send_mail(self, template_prefix, email, context):
subject = render_to_string(
'{0}_subject.txt'.format(template_prefix), context
'{}_subject.txt'.format(template_prefix),
context,
)
subject = " ".join(subject.splitlines()).strip()
subject = ' '.join(subject.splitlines()).strip()
subject = self.format_email_subject(subject)
# Allauth sends some additional data in the context, remove it if the
@ -41,13 +44,15 @@ class AccountAdapter(DefaultAccountAdapter):
removed_keys.append(key)
del context[key]
if removed_keys:
log.debug('Removed context we were unable to serialize: %s',
removed_keys)
log.debug(
'Removed context we were unable to serialize: %s',
removed_keys,
)
send_email(
recipient=email,
subject=subject,
template='{0}_message.txt'.format(template_prefix),
template_html='{0}_message.html'.format(template_prefix),
context=context
template='{}_message.txt'.format(template_prefix),
template_html='{}_message.html'.format(template_prefix),
context=context,
)

View File

@ -1,13 +1,14 @@
# -*- coding: utf-8 -*-
"""Django admin interface for core models."""
from __future__ import absolute_import
from datetime import timedelta
from django.contrib import admin
from django.contrib.auth.models import User
from django.contrib.auth.admin import UserAdmin
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from readthedocs.core.models import UserProfile
from readthedocs.projects.models import Project
@ -59,8 +60,14 @@ class UserAdminExtra(UserAdmin):
"""Admin configuration for User."""
list_display = ('username', 'email', 'first_name',
'last_name', 'is_staff', 'is_banned')
list_display = (
'username',
'email',
'first_name',
'last_name',
'is_staff',
'is_banned',
)
list_filter = (UserProjectFilter,) + UserAdmin.list_filter
actions = ['ban_user']
inlines = [UserProjectInline]

View File

@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
"""App configurations for core app."""
from __future__ import absolute_import
from django.apps import AppConfig

View File

@ -1,10 +1,11 @@
# -*- coding: utf-8 -*-
"""Email backends for core app."""
from __future__ import absolute_import
import smtplib
from django.core.mail.utils import DNS_NAME
from django.core.mail.backends.smtp import EmailBackend
from django.core.mail.utils import DNS_NAME
class SSLEmailBackend(EmailBackend):
@ -13,8 +14,11 @@ class SSLEmailBackend(EmailBackend):
if self.connection:
return False
try:
self.connection = smtplib.SMTP_SSL(self.host, self.port,
local_hostname=DNS_NAME.get_fqdn())
self.connection = smtplib.SMTP_SSL(
self.host,
self.port,
local_hostname=DNS_NAME.get_fqdn(),
)
if self.username and self.password:
self.connection.login(self.username, self.password)
return True

View File

@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
"""Template context processors for core app."""
from __future__ import absolute_import
from django.conf import settings
@ -11,10 +12,17 @@ def readthedocs_processor(request):
'PRODUCTION_DOMAIN': getattr(settings, 'PRODUCTION_DOMAIN', None),
'USE_SUBDOMAINS': getattr(settings, 'USE_SUBDOMAINS', None),
'GLOBAL_ANALYTICS_CODE': getattr(settings, 'GLOBAL_ANALYTICS_CODE'),
'DASHBOARD_ANALYTICS_CODE': getattr(settings, 'DASHBOARD_ANALYTICS_CODE'),
'DASHBOARD_ANALYTICS_CODE': getattr(
settings,
'DASHBOARD_ANALYTICS_CODE',
),
'SITE_ROOT': getattr(settings, 'SITE_ROOT', '') + '/',
'TEMPLATE_ROOT': getattr(settings, 'TEMPLATE_ROOT', '') + '/',
'DO_NOT_TRACK_ENABLED': getattr(settings, 'DO_NOT_TRACK_ENABLED', False),
'DO_NOT_TRACK_ENABLED': getattr(
settings,
'DO_NOT_TRACK_ENABLED',
False,
),
'USE_PROMOS': getattr(settings, 'USE_PROMOS', False),
}
return exports

View File

@ -1,10 +1,11 @@
"""Shared model fields and defaults"""
# -*- coding: utf-8 -*-
"""Shared model fields and defaults."""
from __future__ import absolute_import
import binascii
import os
def default_token():
"""Generate default value for token field"""
"""Generate default value for token field."""
return binascii.hexlify(os.urandom(20)).decode()

View File

@ -1,28 +1,28 @@
[
{
"pk": 1,
"model": "flagging.flagtype",
"pk": 1,
"model": "flagging.flagtype",
"fields": {
"description": "This item is inappropriate to the purpose of the site",
"slug": "inappropriate",
"description": "This item is inappropriate to the purpose of the site",
"slug": "inappropriate",
"title": "Inappropriate"
}
},
},
{
"pk": 2,
"model": "flagging.flagtype",
"pk": 2,
"model": "flagging.flagtype",
"fields": {
"description": "This item is spam",
"slug": "spam",
"description": "This item is spam",
"slug": "spam",
"title": "Spam"
}
},
},
{
"pk": 3,
"model": "flagging.flagtype",
"pk": 3,
"model": "flagging.flagtype",
"fields": {
"description": "These docs are a duplicate of other, official docs, on the site",
"slug": "duplicate",
"description": "These docs are a duplicate of other, official docs, on the site",
"slug": "duplicate",
"title": "Duplicate"
}
}

View File

@ -1,11 +1,8 @@
# -*- coding: utf-8 -*-
"""Forms for core app."""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
import logging
from builtins import object
from django import forms
from django.contrib.auth.models import User
@ -14,6 +11,7 @@ from django.utils.translation import ugettext_lazy as _
from .models import UserProfile
log = logging.getLogger(__name__)
@ -21,13 +19,13 @@ class UserProfileForm(forms.ModelForm):
first_name = CharField(label=_('First name'), required=False, max_length=30)
last_name = CharField(label=_('Last name'), required=False, max_length=30)
class Meta(object):
class Meta:
model = UserProfile
# Don't allow users edit someone else's user page
fields = ['first_name', 'last_name', 'homepage']
def __init__(self, *args, **kwargs):
super(UserProfileForm, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
try:
self.fields['first_name'].initial = self.instance.user.first_name
self.fields['last_name'].initial = self.instance.user.last_name
@ -37,7 +35,7 @@ class UserProfileForm(forms.ModelForm):
def save(self, commit=True):
first_name = self.cleaned_data.pop('first_name', None)
last_name = self.cleaned_data.pop('last_name', None)
profile = super(UserProfileForm, self).save(commit=commit)
profile = super().save(commit=commit)
if commit:
user = profile.user
user.first_name = first_name
@ -52,7 +50,7 @@ class UserDeleteForm(forms.ModelForm):
help_text=_('Please type your username to confirm.'),
)
class Meta(object):
class Meta:
model = User
fields = ['username']
@ -66,7 +64,8 @@ class UserDeleteForm(forms.ModelForm):
class UserAdvertisingForm(forms.ModelForm):
class Meta(object):
class Meta:
model = UserProfile
fields = ['allow_ads']

View File

@ -1,15 +1,16 @@
"""Rebuild documentation for all projects"""
# -*- coding: utf-8 -*-
"""Rebuild documentation for all projects."""
from __future__ import absolute_import
from __future__ import print_function
from glob import glob
import os
import logging
import os
from glob import glob
from django.conf import settings
from django.core.management.base import BaseCommand
from django.template import loader as template_loader
log = logging.getLogger(__name__)
@ -21,10 +22,10 @@ class Command(BaseCommand):
doc_index = {}
os.chdir(settings.DOCROOT)
for directory in glob("*"):
for directory in glob('*'):
doc_index[directory] = []
path = os.path.join(directory, 'rtd-builds')
for version in glob(os.path.join(path, "*")):
for version in glob(os.path.join(path, '*')):
v = version.replace(path + '/', '')
doc_index[directory].append(v)
@ -32,5 +33,7 @@ class Command(BaseCommand):
'doc_index': doc_index,
'MEDIA_URL': settings.MEDIA_URL,
}
html = template_loader.get_template('archive/index.html').render(context)
html = template_loader.get_template(
'archive/index.html',
).render(context)
print(html)

View File

@ -1,9 +1,9 @@
"""Clean up stable build paths per project version"""
# -*- coding: utf-8 -*-
"""Clean up stable build paths per project version."""
from __future__ import absolute_import
from datetime import timedelta
import logging
from optparse import make_option
from datetime import timedelta
from django.core.management.base import BaseCommand
from django.db.models import Max
@ -11,6 +11,7 @@ from django.utils import timezone
from readthedocs.builds.models import Build, Version
log = logging.getLogger(__name__)
@ -24,24 +25,24 @@ class Command(BaseCommand):
dest='days',
type='int',
default=365,
help='Find builds older than DAYS days, default: 365'
help='Find builds older than DAYS days, default: 365',
)
parser.add_argument(
'--dryrun',
action='store_true',
dest='dryrun',
help='Perform dry run on build cleanup'
help='Perform dry run on build cleanup',
)
def handle(self, *args, **options):
"""Find stale builds and remove build paths"""
"""Find stale builds and remove build paths."""
max_date = timezone.now() - timedelta(days=options['days'])
queryset = (Build.objects
.values('project', 'version')
.annotate(max_date=Max('date'))
.filter(max_date__lt=max_date)
.order_by('-max_date'))
queryset = (
Build.objects.values('project', 'version').annotate(
max_date=Max('date'),
).filter(max_date__lt=max_date).order_by('-max_date')
)
for build in queryset:
try:
# Get version from build version id, perform sanity check on

View File

@ -1,8 +1,9 @@
"""Resync GitHub project for user"""
# -*- coding: utf-8 -*-
"""Resync GitHub project for user."""
from __future__ import absolute_import
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from readthedocs.oauth.services import GitHubService
@ -15,6 +16,8 @@ class Command(BaseCommand):
if args:
for slug in args:
for service in GitHubService.for_user(
User.objects.get(username=slug)
User.objects.get(
username=slug,
),
):
service.sync()

View File

@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
"""
Import a project's programming language from GitHub
Import a project's programming language from GitHub.
This builds a basic management command that will set
a project's language to the most used one in GitHub.
@ -8,16 +10,15 @@ Requires a ``GITHUB_AUTH_TOKEN`` to be set in the environment,
which should contain a proper GitHub Oauth Token for rate limiting.
"""
from __future__ import absolute_import
from __future__ import print_function
import os
import requests
from django.core.management.base import BaseCommand
from django.core.cache import cache
from django.core.management.base import BaseCommand
from readthedocs.projects.models import Project
from readthedocs.projects.constants import GITHUB_REGEXS, PROGRAMMING_LANGUAGES
from readthedocs.projects.models import Project
PL_DICT = {}
@ -36,11 +37,7 @@ class Command(BaseCommand):
print('Invalid GitHub token, exiting')
return
for project in Project.objects.filter(
programming_language__in=['none', '', 'words']
).filter(
repo__contains='github'
):
for project in Project.objects.filter(programming_language__in=['none', '', 'words']).filter(repo__contains='github'): # noqa
user = repo = ''
repo_url = project.repo
for regex in GITHUB_REGEXS:
@ -53,7 +50,7 @@ class Command(BaseCommand):
print('No GitHub repo for %s' % repo_url)
continue
cache_key = '%s-%s' % (user, repo)
cache_key = '{}-{}'.format(user, repo)
top_lang = cache.get(cache_key, None)
if not top_lang:
url = 'https://api.github.com/repos/{user}/{repo}/languages'.format(
@ -66,15 +63,21 @@ class Command(BaseCommand):
languages = resp.json()
if not languages:
continue
sorted_langs = sorted(list(languages.items()), key=lambda x: x[1], reverse=True)
sorted_langs = sorted(
list(languages.items()),
key=lambda x: x[1],
reverse=True,
)
print('Sorted langs: %s ' % sorted_langs)
top_lang = sorted_langs[0][0]
else:
print('Cached top_lang: %s' % top_lang)
if top_lang in PL_DICT:
slug = PL_DICT[top_lang]
print('Setting %s to %s' % (repo_url, slug))
Project.objects.filter(pk=project.pk).update(programming_language=slug)
print('Setting {} to {}'.format(repo_url, slug))
Project.objects.filter(
pk=project.pk,
).update(programming_language=slug)
else:
print('Language unknown: %s' % top_lang)
cache.set(cache_key, top_lang, 60 * 600)

View File

@ -1,6 +1,7 @@
"""Trigger build for project slug"""
# -*- coding: utf-8 -*-
"""Trigger build for project slug."""
from __future__ import absolute_import
import logging
from django.core.management.base import BaseCommand

View File

@ -1,13 +1,15 @@
"""Generate metadata for all projects"""
# -*- coding: utf-8 -*-
"""Generate metadata for all projects."""
from __future__ import absolute_import
import logging
from django.core.management.base import BaseCommand
from readthedocs.core.utils import broadcast
from readthedocs.projects import tasks
from readthedocs.projects.models import Project
from readthedocs.core.utils import broadcast
log = logging.getLogger(__name__)
@ -19,8 +21,12 @@ class Command(BaseCommand):
def handle(self, *args, **options):
queryset = Project.objects.all()
for p in queryset:
log.info("Generating metadata for %s", p)
log.info('Generating metadata for %s', p)
try:
broadcast(type='app', task=tasks.update_static_metadata, args=[p.pk])
broadcast(
type='app',
task=tasks.update_static_metadata,
args=[p.pk],
)
except Exception:
log.exception('Build failed for %s', p)

View File

@ -1,14 +1,15 @@
"""Update symlinks for projects"""
# -*- coding: utf-8 -*-
"""Update symlinks for projects."""
from __future__ import absolute_import
import logging
from django.core.management.base import BaseCommand
from readthedocs.projects import tasks
from readthedocs.projects.models import Project
log = logging.getLogger(__name__)
@ -24,7 +25,9 @@ class Command(BaseCommand):
if 'all' in projects:
pks = Project.objects.values_list('pk', flat=True)
else:
pks = Project.objects.filter(slug__in=projects).values_list('pk', flat=True)
pks = Project.objects.filter(
slug__in=projects,
).values_list('pk', flat=True)
for proj in pks:
try:
tasks.symlink_project(project_pk=proj)

View File

@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""
Build documentation using the API and not hitting a database.
@ -6,7 +8,6 @@ Usage::
./manage.py update_api <slug>
"""
from __future__ import absolute_import
import logging
from django.core.management.base import BaseCommand
@ -32,6 +33,6 @@ class Command(BaseCommand):
for slug in options['projects']:
project_data = api.project(slug).get()
p = APIProject(**project_data)
log.info("Building %s", p)
log.info('Building %s', p)
# pylint: disable=no-value-for-parameter
tasks.update_docs_task(p.pk, docker=docker)

View File

@ -6,9 +6,6 @@ Custom management command to rebuild documentation for all projects.
Invoked via ``./manage.py update_repos``.
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
import logging
from django.core.management.base import BaseCommand
@ -18,6 +15,7 @@ from readthedocs.core.utils import trigger_build
from readthedocs.projects import tasks
from readthedocs.projects.models import Project
log = logging.getLogger(__name__)

View File

@ -1,6 +1,7 @@
"""Rebuild documentation for all projects"""
# -*- coding: utf-8 -*-
"""Rebuild documentation for all projects."""
from __future__ import absolute_import
from django.core.management.base import BaseCommand
from readthedocs.builds.models import Version
@ -17,5 +18,5 @@ class Command(BaseCommand):
update_docs_task(
version.project_id,
record=False,
version_pk=version.pk
version_pk=version.pk,
)

View File

@ -1,7 +1,6 @@
"""Middleware for core app."""
# -*- coding: utf-8 -*-
from __future__ import (
absolute_import, division, print_function, unicode_literals)
"""Middleware for core app."""
import logging
@ -9,26 +8,27 @@ from django.conf import settings
from django.contrib.sessions.middleware import SessionMiddleware
from django.core.cache import cache
from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from django.urls.base import get_urlconf, set_urlconf
from django.http import Http404, HttpResponseBadRequest
from django.urls.base import set_urlconf
from django.utils.deprecation import MiddlewareMixin
from django.utils.translation import ugettext_lazy as _
from readthedocs.core.utils import cname_to_slug
from readthedocs.projects.models import Domain, Project
log = logging.getLogger(__name__)
LOG_TEMPLATE = u"(Middleware) {msg} [{host}{path}]"
LOG_TEMPLATE = '(Middleware) {msg} [{host}{path}]'
SUBDOMAIN_URLCONF = getattr(
settings,
'SUBDOMAIN_URLCONF',
'readthedocs.core.urls.subdomain'
'readthedocs.core.urls.subdomain',
)
SINGLE_VERSION_URLCONF = getattr(
settings,
'SINGLE_VERSION_URLCONF',
'readthedocs.core.urls.single_version'
'readthedocs.core.urls.single_version',
)
@ -54,7 +54,7 @@ class SubdomainMiddleware(MiddlewareMixin):
production_domain = getattr(
settings,
'PRODUCTION_DOMAIN',
'readthedocs.org'
'readthedocs.org',
)
if public_domain is None:
@ -67,9 +67,8 @@ class SubdomainMiddleware(MiddlewareMixin):
if len(domain_parts) == len(public_domain.split('.')) + 1:
subdomain = domain_parts[0]
is_www = subdomain.lower() == 'www'
if not is_www and (
# Support ports during local dev
public_domain in host or public_domain in full_host
if not is_www and ( # Support ports during local dev
public_domain in host or public_domain in full_host
):
if not Project.objects.filter(slug=subdomain).exists():
raise Http404(_('Project not found'))
@ -79,10 +78,10 @@ class SubdomainMiddleware(MiddlewareMixin):
return None
# Serve CNAMEs
if (public_domain not in host and
production_domain not in host and
'localhost' not in host and
'testserver' not in host):
if (
public_domain not in host and production_domain not in host and
'localhost' not in host and 'testserver' not in host
):
request.cname = True
domains = Domain.objects.filter(domain=host)
if domains.count():
@ -91,18 +90,26 @@ class SubdomainMiddleware(MiddlewareMixin):
request.slug = domain.project.slug
request.urlconf = SUBDOMAIN_URLCONF
request.domain_object = True
log.debug(LOG_TEMPLATE.format(
msg='Domain Object Detected: %s' % domain.domain,
**log_kwargs))
log.debug(
LOG_TEMPLATE.format(
msg='Domain Object Detected: %s' % domain.domain,
**log_kwargs
),
)
break
if (not hasattr(request, 'domain_object') and
'HTTP_X_RTD_SLUG' in request.META):
if (
not hasattr(request, 'domain_object') and
'HTTP_X_RTD_SLUG' in request.META
):
request.slug = request.META['HTTP_X_RTD_SLUG'].lower()
request.urlconf = SUBDOMAIN_URLCONF
request.rtdheader = True
log.debug(LOG_TEMPLATE.format(
msg='X-RTD-Slug header detected: %s' % request.slug,
**log_kwargs))
log.debug(
LOG_TEMPLATE.format(
msg='X-RTD-Slug header detected: %s' % request.slug,
**log_kwargs
),
)
# Try header first, then DNS
elif not hasattr(request, 'domain_object'):
try:
@ -111,26 +118,39 @@ class SubdomainMiddleware(MiddlewareMixin):
slug = cname_to_slug(host)
cache.set(host, slug, 60 * 60)
# Cache the slug -> host mapping permanently.
log.info(LOG_TEMPLATE.format(
msg='CNAME cached: %s->%s' % (slug, host),
**log_kwargs))
log.info(
LOG_TEMPLATE.format(
msg='CNAME cached: {}->{}'.format(slug, host),
**log_kwargs
),
)
request.slug = slug
request.urlconf = SUBDOMAIN_URLCONF
log.warning(LOG_TEMPLATE.format(
msg='CNAME detected: %s' % request.slug,
**log_kwargs))
log.warning(
LOG_TEMPLATE.format(
msg='CNAME detected: %s' % request.slug,
**log_kwargs
),
)
except: # noqa
# Some crazy person is CNAMEing to us. 404.
log.warning(LOG_TEMPLATE.format(msg='CNAME 404', **log_kwargs))
log.warning(
LOG_TEMPLATE.format(msg='CNAME 404', **log_kwargs),
)
raise Http404(_('Invalid hostname'))
# Google was finding crazy www.blah.readthedocs.org domains.
# Block these explicitly after trying CNAME logic.
if len(domain_parts) > 3 and not settings.DEBUG:
# Stop www.fooo.readthedocs.org
if domain_parts[0] == 'www':
log.debug(LOG_TEMPLATE.format(msg='404ing long domain', **log_kwargs))
log.debug(
LOG_TEMPLATE.format(msg='404ing long domain', **log_kwargs),
)
return HttpResponseBadRequest(_('Invalid hostname'))
log.debug(LOG_TEMPLATE.format(msg='Allowing long domain name', **log_kwargs))
log.debug(
LOG_TEMPLATE
.format(msg='Allowing long domain name', **log_kwargs),
)
# raise Http404(_('Invalid hostname'))
# Normal request.
return None
@ -188,8 +208,9 @@ class SingleVersionMiddleware(MiddlewareMixin):
host = request.get_host()
path = request.get_full_path()
log_kwargs = dict(host=host, path=path)
log.debug(LOG_TEMPLATE.format(
msg='Handling single_version request', **log_kwargs)
log.debug(
LOG_TEMPLATE.
format(msg='Handling single_version request', **log_kwargs),
)
return None
@ -219,7 +240,7 @@ class ProxyMiddleware(MiddlewareMixin):
else:
# HTTP_X_FORWARDED_FOR can be a comma-separated list of IPs. The
# client's IP will be the first one.
real_ip = real_ip.split(",")[0].strip()
real_ip = real_ip.split(',')[0].strip()
request.META['REMOTE_ADDR'] = real_ip
@ -231,20 +252,26 @@ class FooterNoSessionMiddleware(SessionMiddleware):
This will reduce the size of our session table drastically.
"""
IGNORE_URLS = ['/api/v2/footer_html', '/sustainability/view', '/sustainability/click']
IGNORE_URLS = [
'/api/v2/footer_html', '/sustainability/view', '/sustainability/click',
]
def process_request(self, request):
for url in self.IGNORE_URLS:
if (request.path_info.startswith(url) and
settings.SESSION_COOKIE_NAME not in request.COOKIES):
if (
request.path_info.startswith(url) and
settings.SESSION_COOKIE_NAME not in request.COOKIES
):
# Hack request.session otherwise the Authentication middleware complains.
request.session = {}
return
super(FooterNoSessionMiddleware, self).process_request(request)
super().process_request(request)
def process_response(self, request, response):
for url in self.IGNORE_URLS:
if (request.path_info.startswith(url) and
settings.SESSION_COOKIE_NAME not in request.COOKIES):
if (
request.path_info.startswith(url) and
settings.SESSION_COOKIE_NAME not in request.COOKIES
):
return response
return super(FooterNoSessionMiddleware, self).process_response(request, response)
return super().process_response(request, response)

View File

@ -1,9 +1,6 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import models, migrations
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):

View File

@ -1,9 +1,6 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import models, migrations
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):

View File

@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import models, migrations
from django.db import migrations, models
class Migration(migrations.Migration):

View File

@ -1,11 +1,9 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2017-06-14 18:06
from __future__ import unicode_literals
import annoying.fields
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):

View File

@ -1,9 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-11 17:28
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth.hashers import make_password
from django.db import migrations
def forwards_func(apps, schema_editor):

View File

@ -1,24 +1,24 @@
"""Common mixin classes for views"""
# -*- coding: utf-8 -*-
"""Common mixin classes for views."""
from __future__ import absolute_import
from builtins import object
from vanilla import ListView
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from vanilla import ListView
class ListViewWithForm(ListView):
"""List view that also exposes a create form"""
"""List view that also exposes a create form."""
def get_context_data(self, **kwargs):
context = super(ListViewWithForm, self).get_context_data(**kwargs)
context = super().get_context_data(**kwargs)
context['form'] = self.get_form(data=None, files=None)
return context
class LoginRequiredMixin(object):
class LoginRequiredMixin:
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(LoginRequiredMixin, self).dispatch(*args, **kwargs)
return super().dispatch(*args, **kwargs)

View File

@ -1,16 +1,15 @@
# -*- coding: utf-8 -*-
"""Models for the core app."""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
"""Models for the core app."""
import logging
from annoying.fields import AutoOneToOneField
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ugettext
from django.urls import reverse
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext
from django.utils.translation import ugettext_lazy as _
STANDARD_EMAIL = 'anonymous@readthedocs.org'
@ -23,7 +22,10 @@ class UserProfile(models.Model):
"""Additional information about a User."""
user = AutoOneToOneField(
'auth.User', verbose_name=_('User'), related_name='profile')
'auth.User',
verbose_name=_('User'),
related_name='profile',
)
whitelisted = models.BooleanField(_('Whitelisted'), default=False)
banned = models.BooleanField(_('Banned'), default=False)
homepage = models.CharField(_('Homepage'), max_length=100, blank=True)
@ -41,10 +43,14 @@ class UserProfile(models.Model):
def __str__(self):
return (
ugettext("%(username)s's profile") %
{'username': self.user.username})
{'username': self.user.username}
)
def get_absolute_url(self):
return reverse('profiles_profile_detail', kwargs={'username': self.user.username})
return reverse(
'profiles_profile_detail',
kwargs={'username': self.user.username},
)
def get_contribution_details(self):
"""

View File

@ -1,11 +1,11 @@
"""Objects for User permission checks"""
# -*- coding: utf-8 -*-
from __future__ import absolute_import
"""Objects for User permission checks."""
from readthedocs.core.utils.extend import SettingsOverrideObject
class AdminPermissionBase(object):
class AdminPermissionBase:
@classmethod
def is_admin(cls, user, project):

View File

@ -1,16 +1,16 @@
# -*- coding: utf-8 -*-
"""URL resolver for documentation."""
from __future__ import absolute_import
from builtins import object
import re
from django.conf import settings
from readthedocs.projects.constants import PRIVATE, PUBLIC
from readthedocs.core.utils.extend import SettingsOverrideObject
from readthedocs.projects.constants import PRIVATE, PUBLIC
class ResolverBase(object):
class ResolverBase:
"""
Read the Docs URL Resolver.
@ -51,35 +51,55 @@ class ResolverBase(object):
/docs/<project_slug>/projects/<subproject_slug>/<filename>
"""
def base_resolve_path(self, project_slug, filename, version_slug=None,
language=None, private=False, single_version=None,
subproject_slug=None, subdomain=None, cname=None):
def base_resolve_path(
self,
project_slug,
filename,
version_slug=None,
language=None,
private=False,
single_version=None,
subproject_slug=None,
subdomain=None,
cname=None,
):
"""Resolve a with nothing smart, just filling in the blanks."""
# Only support `/docs/project' URLs outside our normal environment. Normally
# the path should always have a subdomain or CNAME domain
# pylint: disable=unused-argument
if subdomain or cname or (self._use_subdomain()):
url = u'/'
url = '/'
else:
url = u'/docs/{project_slug}/'
url = '/docs/{project_slug}/'
if subproject_slug:
url += u'projects/{subproject_slug}/'
url += 'projects/{subproject_slug}/'
if single_version:
url += u'{filename}'
url += '{filename}'
else:
url += u'{language}/{version_slug}/{filename}'
url += '{language}/{version_slug}/{filename}'
return url.format(
project_slug=project_slug, filename=filename,
version_slug=version_slug, language=language,
single_version=single_version, subproject_slug=subproject_slug,
project_slug=project_slug,
filename=filename,
version_slug=version_slug,
language=language,
single_version=single_version,
subproject_slug=subproject_slug,
)
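# Illustrative sketch (not part of the commit), with made-up slugs:
#
#     base_resolve_path('pip', 'index.html', version_slug='latest',
#                       language='en', subdomain=True)
#     # -> '/en/latest/index.html'
#
#     base_resolve_path('pip', 'index.html', version_slug='latest',
#                       language='en')
#     # -> '/docs/pip/en/latest/index.html'
#     # (assuming ``self._use_subdomain()`` is False and no cname)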
def resolve_path(self, project, filename='', version_slug=None,
language=None, single_version=None, subdomain=None,
cname=None, private=None):
def resolve_path(
self,
project,
filename='',
version_slug=None,
language=None,
single_version=None,
subdomain=None,
cname=None,
private=None,
):
"""Resolve a URL with a subset of fields defined."""
cname = cname or project.domains.filter(canonical=True).first()
version_slug = version_slug or project.get_default_version()
@ -138,8 +158,10 @@ class ResolverBase(object):
return getattr(settings, 'PRODUCTION_DOMAIN')
def resolve(self, project, require_https=False, filename='', private=None,
**kwargs):
def resolve(
self, project, require_https=False, filename='', private=None,
**kwargs
):
if private is None:
version_slug = kwargs.get('version_slug')
if version_slug is None:
@ -173,8 +195,8 @@ class ResolverBase(object):
return '{protocol}://{domain}{path}'.format(
protocol=protocol,
domain=domain,
path=self.resolve_path(project, filename=filename, private=private,
**kwargs),
path=self.
resolve_path(project, filename=filename, private=private, **kwargs),
)
def _get_canonical_project(self, project, projects=None):
@ -212,7 +234,7 @@ class ResolverBase(object):
if self._use_subdomain():
project = self._get_canonical_project(project)
subdomain_slug = project.slug.replace('_', '-')
return "%s.%s" % (subdomain_slug, public_domain)
return '{}.{}'.format(subdomain_slug, public_domain)
def _get_project_custom_domain(self, project):
return project.domains.filter(canonical=True).first()
@ -223,7 +245,11 @@ class ResolverBase(object):
version = project.versions.get(slug=version_slug)
private = version.privacy_level == PRIVATE
except Version.DoesNotExist:
private = getattr(settings, 'DEFAULT_PRIVACY_LEVEL', PUBLIC) == PRIVATE
private = getattr(
settings,
'DEFAULT_PRIVACY_LEVEL',
PUBLIC,
) == PRIVATE
return private
def _fix_filename(self, project, filename):
@ -241,17 +267,17 @@ class ResolverBase(object):
if filename:
if filename.endswith('/') or filename.endswith('.html'):
path = filename
elif project.documentation_type == "sphinx_singlehtml":
path = "index.html#document-" + filename
elif project.documentation_type in ["sphinx_htmldir", "mkdocs"]:
path = filename + "/"
elif project.documentation_type == 'sphinx_singlehtml':
path = 'index.html#document-' + filename
elif project.documentation_type in ['sphinx_htmldir', 'mkdocs']:
path = filename + '/'
elif '#' in filename:
# do nothing if the filename contains URL fragments
path = filename
else:
path = filename + ".html"
path = filename + '.html'
else:
path = ""
path = ''
return path
def _use_custom_domain(self, custom_domain):

View File

@ -1,12 +1,10 @@
"""Class based settings for complex settings inheritance."""
from __future__ import absolute_import
from builtins import object
import inspect
import sys
class Settings(object):
class Settings:
"""Class-based settings wrapper."""

View File

@ -2,21 +2,19 @@
"""Signal handling for core app."""
from __future__ import absolute_import
import logging
from urllib.parse import urlparse
from corsheaders import signals
from django.conf import settings
from django.db.models import Count, Q
from django.db.models.signals import pre_delete
from django.dispatch import Signal
from django.db.models import Q, Count
from django.dispatch import receiver
from future.backports.urllib.parse import urlparse
from django.dispatch import Signal, receiver
from rest_framework.permissions import SAFE_METHODS
from readthedocs.oauth.models import RemoteOrganization
from readthedocs.projects.models import Project, Domain
from readthedocs.projects.models import Domain, Project
log = logging.getLogger(__name__)
@ -92,15 +90,17 @@ def delete_projects_and_organizations(sender, instance, *args, **kwargs):
# https://github.com/rtfd/readthedocs.org/pull/4577
# https://docs.djangoproject.com/en/2.1/topics/db/aggregation/#order-of-annotate-and-filter-clauses # noqa
projects = (
Project.objects.annotate(num_users=Count('users'))
.filter(users=instance.id).exclude(num_users__gt=1)
Project.objects.annotate(num_users=Count('users')
).filter(users=instance.id
).exclude(num_users__gt=1)
)
# Here we count the users list from the organization that the user belong
# Then exclude the organizations where there are more than one user
oauth_organizations = (
RemoteOrganization.objects.annotate(num_users=Count('users'))
.filter(users=instance.id).exclude(num_users__gt=1)
RemoteOrganization.objects.annotate(num_users=Count('users')
).filter(users=instance.id
).exclude(num_users__gt=1)
)
projects.delete()

View File

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
from __future__ import division, print_function, unicode_literals
from django.contrib.staticfiles.finders import FileSystemFinder
@ -15,4 +13,4 @@ class SelectiveFileSystemFinder(FileSystemFinder):
def list(self, ignore_patterns):
ignore_patterns.extend(['epub', 'pdf', 'htmlzip', 'json', 'man'])
return super(SelectiveFileSystemFinder, self).list(ignore_patterns)
return super().list(ignore_patterns)

File diff suppressed because it is too large

Binary image changed (before: 434 KiB, after: 433 KiB)

View File

@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""
A class that manages the symlinks for nginx to serve public files.
@ -52,19 +54,11 @@ Example layout
fabric -> rtd-builds/fabric/en/latest/ # single version
"""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import logging
import os
import shutil
from collections import OrderedDict
from builtins import object
from django.conf import settings
from readthedocs.builds.models import Version
@ -74,20 +68,23 @@ from readthedocs.doc_builder.environments import LocalEnvironment
from readthedocs.projects import constants
from readthedocs.projects.models import Domain
log = logging.getLogger(__name__)
class Symlink(object):
class Symlink:
"""Base class for symlinking of projects."""
def __init__(self, project):
self.project = project
self.project_root = os.path.join(
self.WEB_ROOT, project.slug
self.WEB_ROOT,
project.slug,
)
self.subproject_root = os.path.join(
self.project_root, 'projects'
self.project_root,
'projects',
)
self.environment = LocalEnvironment(project)
self.sanity_check()
@ -99,9 +96,13 @@ class Symlink(object):
This will leave it in the proper state for the single_project setting.
"""
if os.path.islink(self.project_root) and not self.project.single_version:
log.info(constants.LOG_TEMPLATE.format(
project=self.project.slug, version='',
msg="Removing single version symlink"))
log.info(
constants.LOG_TEMPLATE.format(
project=self.project.slug,
version='',
msg='Removing single version symlink',
),
)
safe_unlink(self.project_root)
safe_makedirs(self.project_root)
elif (self.project.single_version and
@ -151,44 +152,59 @@ class Symlink(object):
if domain:
domains = [domain]
else:
domains = Domain.objects.filter(project=self.project)
domains = Domain.objects.filter(project=self.project).values_list('domain', flat=True)
for dom in domains:
log_msg = 'Symlinking CNAME: {} -> {}'.format(
dom.domain, self.project.slug
dom,
self.project.slug,
)
log.info(
constants.LOG_TEMPLATE.format(
project=self.project.slug,
version='', msg=log_msg
)
version='',
msg=log_msg,
),
)
# CNAME to doc root
symlink = os.path.join(self.CNAME_ROOT, dom.domain)
symlink = os.path.join(self.CNAME_ROOT, dom)
self.environment.run('ln', '-nsf', self.project_root, symlink)
# Project symlink
project_cname_symlink = os.path.join(
self.PROJECT_CNAME_ROOT, dom.domain
self.PROJECT_CNAME_ROOT,
dom,
)
self.environment.run(
'ln', '-nsf', self.project.doc_path, project_cname_symlink
'ln',
'-nsf',
self.project.doc_path,
project_cname_symlink,
)
def remove_symlink_cname(self, domain):
"""Remove CNAME symlink."""
log_msg = "Removing symlink for CNAME {0}".format(domain.domain)
log.info(constants.LOG_TEMPLATE.format(project=self.project.slug,
version='', msg=log_msg))
symlink = os.path.join(self.CNAME_ROOT, domain.domain)
"""
Remove CNAME symlink.
:param domain: domain for which symlink is to be removed
:type domain: str
"""
log_msg = 'Removing symlink for CNAME {}'.format(domain)
log.info(
constants.LOG_TEMPLATE.format(
project=self.project.slug,
version='',
msg=log_msg
),
)
symlink = os.path.join(self.CNAME_ROOT, domain)
safe_unlink(symlink)
def symlink_subprojects(self):
"""
Symlink project subprojects.
Link from $WEB_ROOT/projects/<project> ->
$WEB_ROOT/<project>
Link from $WEB_ROOT/projects/<project> -> $WEB_ROOT/<project>
"""
subprojects = set()
rels = self.get_subprojects()
@ -205,12 +221,21 @@ class Symlink(object):
from_to[rel.alias] = rel.child.slug
subprojects.add(rel.alias)
for from_slug, to_slug in list(from_to.items()):
log_msg = "Symlinking subproject: {0} -> {1}".format(from_slug, to_slug)
log.info(constants.LOG_TEMPLATE.format(project=self.project.slug,
version='', msg=log_msg))
log_msg = 'Symlinking subproject: {} -> {}'.format(
from_slug,
to_slug,
)
log.info(
constants.LOG_TEMPLATE.format(
project=self.project.slug,
version='',
msg=log_msg,
),
)
symlink = os.path.join(self.subproject_root, from_slug)
docs_dir = os.path.join(
self.WEB_ROOT, to_slug
self.WEB_ROOT,
to_slug,
)
symlink_dir = os.sep.join(symlink.split(os.path.sep)[:-1])
if not os.path.lexists(symlink_dir):
@ -222,7 +247,8 @@ class Symlink(object):
if result.exit_code > 0:
log.error(
'Could not symlink path: status=%d error=%s',
result.exit_code, result.error
result.exit_code,
result.error,
)
# Remove old symlinks
@ -236,7 +262,7 @@ class Symlink(object):
Symlink project translations.
Link from $WEB_ROOT/<project>/<language>/ ->
$WEB_ROOT/<translation>/<language>/
$WEB_ROOT/<translation>/<language>/
"""
translations = {}
@ -256,8 +282,9 @@ class Symlink(object):
log.info(
constants.LOG_TEMPLATE.format(
project=self.project.slug,
version='', msg=log_msg
)
version='',
msg=log_msg,
),
)
symlink = os.path.join(self.project_root, language)
docs_dir = os.path.join(self.WEB_ROOT, slug, language)
@ -277,8 +304,9 @@ class Symlink(object):
"""
Symlink project single version.
Link from $WEB_ROOT/<project> ->
HOME/user_builds/<project>/rtd-builds/latest/
Link from:
$WEB_ROOT/<project> -> HOME/user_builds/<project>/rtd-builds/latest/
"""
version = self.get_default_version()
@ -295,7 +323,7 @@ class Symlink(object):
settings.DOCROOT,
self.project.slug,
'rtd-builds',
version.slug
version.slug,
)
self.environment.run('ln', '-nsf', docs_dir, symlink)
@ -304,11 +332,13 @@ class Symlink(object):
Symlink project's versions.
Link from $WEB_ROOT/<project>/<language>/<version>/ ->
HOME/user_builds/<project>/rtd-builds/<version>
HOME/user_builds/<project>/rtd-builds/<version>
"""
versions = set()
version_dir = os.path.join(
self.WEB_ROOT, self.project.slug, self.project.language
self.WEB_ROOT,
self.project.slug,
self.project.language,
)
# Include active public versions,
# as well as public versions that are built but not active, for archived versions
@ -322,15 +352,15 @@ class Symlink(object):
constants.LOG_TEMPLATE.format(
project=self.project.slug,
version='',
msg=log_msg
)
msg=log_msg,
),
)
symlink = os.path.join(version_dir, version.slug)
docs_dir = os.path.join(
settings.DOCROOT,
self.project.slug,
'rtd-builds',
version.slug
version.slug,
)
self.environment.run('ln', '-nsf', docs_dir, symlink)
versions.add(version.slug)
@ -353,11 +383,18 @@ class Symlink(object):
class PublicSymlinkBase(Symlink):
CNAME_ROOT = os.path.join(settings.SITE_ROOT, 'public_cname_root')
WEB_ROOT = os.path.join(settings.SITE_ROOT, 'public_web_root')
PROJECT_CNAME_ROOT = os.path.join(settings.SITE_ROOT, 'public_cname_project')
PROJECT_CNAME_ROOT = os.path.join(
settings.SITE_ROOT,
'public_cname_project',
)
def get_version_queryset(self):
return (self.project.versions.protected(only_active=False).filter(built=True) |
self.project.versions.protected(only_active=True))
return (
self.project.versions.protected(
only_active=False,
).filter(built=True) |
self.project.versions.protected(only_active=True)
)
def get_subprojects(self):
return self.project.subprojects.protected()
@ -369,11 +406,16 @@ class PublicSymlinkBase(Symlink):
class PrivateSymlinkBase(Symlink):
CNAME_ROOT = os.path.join(settings.SITE_ROOT, 'private_cname_root')
WEB_ROOT = os.path.join(settings.SITE_ROOT, 'private_web_root')
PROJECT_CNAME_ROOT = os.path.join(settings.SITE_ROOT, 'private_cname_project')
PROJECT_CNAME_ROOT = os.path.join(
settings.SITE_ROOT,
'private_cname_project',
)
def get_version_queryset(self):
return (self.project.versions.private(only_active=False).filter(built=True) |
self.project.versions.private(only_active=True))
return (
self.project.versions.private(only_active=False).filter(built=True) |
self.project.versions.private(only_active=True)
)
def get_subprojects(self):
return self.project.subprojects.private()
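
For orientation, every helper in this file reduces to the same `ln -nsf` call over paths built with `os.path.join`. A minimal standalone sketch of the subproject case follows; `web_root`, `alias` and `child_slug` are invented names for illustration, not values from the diff.

import os
import subprocess


def symlink_subproject(web_root, alias, child_slug):
    """Point $WEB_ROOT/projects/<alias> at $WEB_ROOT/<child_slug>."""
    symlink = os.path.join(web_root, 'projects', alias)
    docs_dir = os.path.join(web_root, child_slug)
    os.makedirs(os.path.dirname(symlink), exist_ok=True)
    # -s: symbolic link, -f: replace an existing link,
    # -n: do not follow an existing symlink to a directory
    # (mirrors the run('ln', '-nsf', ...) calls above).
    subprocess.check_call(['ln', '-nsf', docs_dir, symlink])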

View File

@ -1,12 +1,13 @@
# -*- coding: utf-8 -*-
"""Basic tasks."""
from __future__ import absolute_import
import logging
from django.conf import settings
from django.core.mail import EmailMultiAlternatives
from django.template.loader import get_template
from django.template import TemplateDoesNotExist
from django.template.loader import get_template
from django.utils import timezone
from messages_extends.models import Message as PersistentMessage
@ -19,8 +20,10 @@ EMAIL_TIME_LIMIT = 30
@app.task(queue='web', time_limit=EMAIL_TIME_LIMIT)
def send_email_task(recipient, subject, template, template_html,
context=None, from_email=None, **kwargs):
def send_email_task(
recipient, subject, template, template_html, context=None,
from_email=None, **kwargs
):
"""
Send multipart email.
@ -44,14 +47,15 @@ def send_email_task(recipient, subject, template, template_html,
"""
msg = EmailMultiAlternatives(
subject,
get_template(template).render(context),
from_email or settings.DEFAULT_FROM_EMAIL,
[recipient],
**kwargs
get_template(template).render(context), from_email or
settings.DEFAULT_FROM_EMAIL,
[recipient], **kwargs
)
try:
msg.attach_alternative(get_template(template_html).render(context),
'text/html')
msg.attach_alternative(
get_template(template_html).render(context),
'text/html',
)
except TemplateDoesNotExist:
pass
msg.send()
@ -62,5 +66,7 @@ def send_email_task(recipient, subject, template, template_html,
def clear_persistent_messages():
# Delete all expired message_extend's messages
log.info("Deleting all expired message_extend's messages")
expired_messages = PersistentMessage.objects.filter(expires__lt=timezone.now())
expired_messages = PersistentMessage.objects.filter(
expires__lt=timezone.now(),
)
expired_messages.delete()
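
A hypothetical call site for the task above (not part of this diff); the template paths, context key, and the `readthedocs.core.tasks` module path are assumptions made for illustration only.

from readthedocs.core.tasks import send_email_task

send_email_task.delay(
    recipient='user@example.com',
    subject='Your build finished',
    template='core/email/common.txt',        # plain-text body (assumed path)
    template_html='core/email/common.html',  # optional HTML alternative
    context={'content': 'Build of pip:latest completed successfully.'},
)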

View File

@ -1,15 +1,14 @@
# -*- coding: utf-8 -*-
"""Template tags for core app."""
from __future__ import absolute_import
import hashlib
from urllib.parse import urlencode
from builtins import str # pylint: disable=redefined-builtin
from django import template
from django.conf import settings
from django.utils.encoding import force_bytes, force_text
from django.utils.safestring import mark_safe
from future.backports.urllib.parse import urlencode
from readthedocs import __version__
from readthedocs.core.resolver import resolve
@ -22,23 +21,25 @@ register = template.Library()
@register.filter
def gravatar(email, size=48):
"""
Hacked from djangosnippets.org, but basically given an email address
Hacked from djangosnippets.org, but basically given an email address.
render an img tag with the hashed up bits needed for leetness
omgwtfstillreading
"""
url = "http://www.gravatar.com/avatar.php?%s" % urlencode({
url = 'http://www.gravatar.com/avatar.php?%s' % urlencode({
'gravatar_id': hashlib.md5(email).hexdigest(),
'size': str(size)
'size': str(size),
})
return ('<img src="%s" width="%s" height="%s" alt="gravatar" '
'class="gravatar" border="0" />' % (url, size, size))
return (
'<img src="%s" width="%s" height="%s" alt="gravatar" '
'class="gravatar" border="0" />' % (url, size, size)
)
@register.simple_tag(name="doc_url")
@register.simple_tag(name='doc_url')
def make_document_url(project, version=None, page=''):
if not project:
return ""
return ''
return resolve(project=project, version_slug=version, filename=page)
@ -51,7 +52,7 @@ def restructuredtext(value, short=False):
if settings.DEBUG:
raise template.TemplateSyntaxError(
"Error in 'restructuredtext' filter: "
"The Python docutils library isn't installed."
"The Python docutils library isn't installed.",
)
return force_text(value)
else:
@ -59,20 +60,22 @@ def restructuredtext(value, short=False):
'raw_enabled': False,
'file_insertion_enabled': False,
}
docutils_settings.update(getattr(settings, 'RESTRUCTUREDTEXT_FILTER_SETTINGS', {}))
docutils_settings.update(
getattr(settings, 'RESTRUCTUREDTEXT_FILTER_SETTINGS', {}),
)
try:
parts = publish_parts(
source=force_bytes(value),
writer_name="html4css1",
writer_name='html4css1',
settings_overrides=docutils_settings,
)
except ApplicationError:
return force_text(value)
out = force_text(parts["fragment"])
out = force_text(parts['fragment'])
try:
if short:
out = out.split("\n")[0]
out = out.split('\n')[0]
except IndexError:
pass
return mark_safe(out)
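
For reference, the URL the `gravatar` filter assembles boils down to the standalone snippet below (the address is illustrative; note the snippet hashes the encoded address, which Python 3's `hashlib.md5` requires, whereas the filter above passes the string straight through).

import hashlib
from urllib.parse import urlencode

email = 'docs@example.com'
size = 48
params = urlencode({
    'gravatar_id': hashlib.md5(email.encode('utf-8')).hexdigest(),
    'size': str(size),
})
print('http://www.gravatar.com/avatar.php?%s' % params)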

View File

@ -1,6 +1,6 @@
"""Template tags to query projects by privacy."""
# -*- coding: utf-8 -*-
from __future__ import absolute_import
"""Template tags to query projects by privacy."""
from django import template
@ -18,6 +18,9 @@ def is_admin(user, project):
@register.simple_tag(takes_context=True)
def get_public_projects(context, user):
projects = Project.objects.for_user_and_viewer(user=user, viewer=context['request'].user)
projects = Project.objects.for_user_and_viewer(
user=user,
viewer=context['request'].user,
)
context['public_projects'] = projects
return ''

View File

@ -1,6 +1,6 @@
import pytest
# -*- coding: utf-8 -*-
import django_dynamic_fixture
import pytest
from django.contrib.auth.models import User
from readthedocs.oauth.models import RemoteOrganization
@ -8,15 +8,13 @@ from readthedocs.projects.models import Project
@pytest.mark.django_db
class TestProjectOrganizationSignal(object):
class TestProjectOrganizationSignal:
@pytest.mark.parametrize('model_class', [Project, RemoteOrganization])
def test_project_organization_get_deleted_upon_user_delete(self, model_class):
"""
If the user has Project or RemoteOrganization where he is the only user,
upon deleting his account, the Project or RemoteOrganization should also get
deleted.
"""
"""If the user has Project or RemoteOrganization where he is the only
user, upon deleting his account, the Project or RemoteOrganization
should also get deleted."""
obj = django_dynamic_fixture.get(model_class)
user1 = django_dynamic_fixture.get(User)
@ -33,10 +31,8 @@ class TestProjectOrganizationSignal(object):
@pytest.mark.parametrize('model_class', [Project, RemoteOrganization])
def test_multiple_users_project_organization_not_delete(self, model_class):
"""
Check Project or RemoteOrganization which have multiple users do not get deleted
when any of the user delete his account.
"""
"""Check Project or RemoteOrganization which have multiple users do not
get deleted when any of the user delete his account."""
obj = django_dynamic_fixture.get(model_class)
user1 = django_dynamic_fixture.get(User)

View File

@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""URL configuration for core app."""
from __future__ import absolute_import
@ -8,53 +10,77 @@ from readthedocs.core import views
from readthedocs.core.views import hooks, serve
from readthedocs.projects.feeds import LatestProjectsFeed, NewProjectsFeed
docs_urls = [
url((r'^docs/(?P<project_slug>{project_slug})/page/'
r'(?P<filename>{filename_slug})$'.format(**pattern_opts)),
url(
(
r'^docs/(?P<project_slug>{project_slug})/page/'
r'(?P<filename>{filename_slug})$'.format(**pattern_opts)
),
serve.redirect_page_with_filename,
name='docs_detail'),
url((r'^docs/(?P<project_slug>{project_slug})/'
r'(?:|projects/(?P<subproject_slug>{project_slug})/)$'.format(**pattern_opts)),
name='docs_detail',
),
url(
(
r'^docs/(?P<project_slug>{project_slug})/'
r'(?:|projects/(?P<subproject_slug>{project_slug})/)$'.format(
**pattern_opts
)
),
serve.redirect_project_slug,
name='docs_detail'),
url((r'^docs/(?P<project_slug>{project_slug})/'
r'(?:|projects/(?P<subproject_slug>{project_slug})/)'
r'(?P<lang_slug>{lang_slug})/'
r'(?P<version_slug>{version_slug})/'
r'(?P<filename>{filename_slug})'.format(**pattern_opts)),
name='docs_detail',
),
url(
(
r'^docs/(?P<project_slug>{project_slug})/'
r'(?:|projects/(?P<subproject_slug>{project_slug})/)'
r'(?P<lang_slug>{lang_slug})/'
r'(?P<version_slug>{version_slug})/'
r'(?P<filename>{filename_slug})'.format(**pattern_opts)
),
serve.serve_docs,
name='docs_detail'),
name='docs_detail',
),
]
core_urls = [
# Hooks
url(r'^github', hooks.github_build, name='github_build'),
url(r'^gitlab', hooks.gitlab_build, name='gitlab_build'),
url(r'^bitbucket', hooks.bitbucket_build, name='bitbucket_build'),
url((r'^build/'
r'(?P<project_id_or_slug>{project_slug})'.format(**pattern_opts)),
url(
(
r'^build/'
r'(?P<project_id_or_slug>{project_slug})'.format(**pattern_opts)
),
hooks.generic_build,
name='generic_build'),
name='generic_build',
),
# Random other stuff
url(r'^random/(?P<project_slug>{project_slug})'.format(**pattern_opts),
url(
r'^random/(?P<project_slug>{project_slug})'.format(**pattern_opts),
views.random_page,
name='random_page'),
name='random_page',
),
url(r'^random/$', views.random_page, name='random_page'),
url((r'^wipe/(?P<project_slug>{project_slug})/'
r'(?P<version_slug>{version_slug})/$'.format(**pattern_opts)),
url(
(
r'^wipe/(?P<project_slug>{project_slug})/'
r'(?P<version_slug>{version_slug})/$'.format(**pattern_opts)
),
views.wipe_version,
name='wipe_version'),
name='wipe_version',
),
]
deprecated_urls = [
url(r'^feeds/new/$',
url(
r'^feeds/new/$',
NewProjectsFeed(),
name="new_feed"),
url(r'^feeds/latest/$',
name='new_feed',
),
url(
r'^feeds/latest/$',
LatestProjectsFeed(),
name="latest_feed"),
name='latest_feed',
),
]
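
To see what the `docs_detail` patterns above match once `pattern_opts` is substituted in, here is a self-contained sketch; the option values below are simplified stand-ins, not the real `readthedocs.constants.pattern_opts`.

import re

pattern_opts = {  # simplified stand-ins, not readthedocs.constants.pattern_opts
    'project_slug': r'[-\w]+',
    'lang_slug': r'\w{2,5}',
    'version_slug': r'[-._\w]+?',
    'filename_slug': r'.*',
}
regex = (
    r'^docs/(?P<project_slug>{project_slug})/'
    r'(?:|projects/(?P<subproject_slug>{project_slug})/)'
    r'(?P<lang_slug>{lang_slug})/'
    r'(?P<version_slug>{version_slug})/'
    r'(?P<filename>{filename_slug})'.format(**pattern_opts)
)
match = re.match(regex, 'docs/pip/en/stable/quickstart.html')
print(match.groupdict())
# {'project_slug': 'pip', 'subproject_slug': None, 'lang_slug': 'en',
#  'version_slug': 'stable', 'filename': 'quickstart.html'}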

View File

@ -1,47 +1,59 @@
"""URL configuration for a single version."""
from __future__ import absolute_import
# -*- coding: utf-8 -*-
"""URL configuration for a single version."""
from functools import reduce
from operator import add
from django.conf.urls import url
from django.conf import settings
from django.conf.urls import url
from django.conf.urls.static import static
from readthedocs.constants import pattern_opts
from readthedocs.core.views import serve
handler500 = 'readthedocs.core.views.server_error_500'
handler404 = 'readthedocs.core.views.server_error_404'
single_version_urls = [
url(r'^(?:|projects/(?P<subproject_slug>{project_slug})/)'
url(
r'^(?:|projects/(?P<subproject_slug>{project_slug})/)'
r'page/(?P<filename>.*)$'.format(**pattern_opts),
serve.redirect_page_with_filename,
name='docs_detail'),
url((r'^(?:|projects/(?P<subproject_slug>{project_slug})/)'
r'(?P<filename>{filename_slug})$'.format(**pattern_opts)),
name='docs_detail',
),
url(
(
r'^(?:|projects/(?P<subproject_slug>{project_slug})/)'
r'(?P<filename>{filename_slug})$'.format(**pattern_opts)
),
serve.serve_docs,
name='docs_detail'),
name='docs_detail',
),
]
groups = [single_version_urls]
# Needed to serve media locally
if getattr(settings, 'DEBUG', False):
groups.insert(0, static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT))
groups.insert(
0,
static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT),
)
# Allow `/docs/<foo>` URL's when not using subdomains or during local dev
if not getattr(settings, 'USE_SUBDOMAIN', False) or settings.DEBUG:
docs_url = [
url((r'^docs/(?P<project_slug>[-\w]+)/'
r'(?:|projects/(?P<subproject_slug>{project_slug})/)'
r'(?P<filename>{filename_slug})$'.format(**pattern_opts)),
url(
(
r'^docs/(?P<project_slug>[-\w]+)/'
r'(?:|projects/(?P<subproject_slug>{project_slug})/)'
r'(?P<filename>{filename_slug})$'.format(**pattern_opts)
),
serve.serve_docs,
name='docs_detail')
name='docs_detail',
),
]
groups.insert(1, docs_url)
urlpatterns = reduce(add, groups)

View File

@ -1,52 +1,63 @@
# -*- coding: utf-8 -*-
"""URL configurations for subdomains."""
from __future__ import absolute_import
from functools import reduce
from operator import add
from django.conf.urls import url
from django.conf import settings
from django.conf.urls import url
from django.conf.urls.static import static
from readthedocs.constants import pattern_opts
from readthedocs.core.views import server_error_404, server_error_500
from readthedocs.core.views.serve import (
redirect_page_with_filename,
redirect_project_slug, serve_docs, robots_txt,
redirect_project_slug,
robots_txt,
serve_docs,
)
from readthedocs.core.views import (
server_error_500,
server_error_404,
)
from readthedocs.constants import pattern_opts
handler500 = server_error_500
handler404 = server_error_404
subdomain_urls = [
url(r'robots.txt$', robots_txt, name='robots_txt'),
url(r'^(?:|projects/(?P<subproject_slug>{project_slug})/)'
url(
r'^(?:|projects/(?P<subproject_slug>{project_slug})/)'
r'page/(?P<filename>.*)$'.format(**pattern_opts),
redirect_page_with_filename,
name='docs_detail'),
url((r'^(?:|projects/(?P<subproject_slug>{project_slug})/)$').format(**pattern_opts),
name='docs_detail',
),
url(
(r'^(?:|projects/(?P<subproject_slug>{project_slug})/)$').format(
**pattern_opts
),
redirect_project_slug,
name='redirect_project_slug'),
url((r'^(?:|projects/(?P<subproject_slug>{project_slug})/)'
r'(?P<lang_slug>{lang_slug})/'
r'(?P<version_slug>{version_slug})/'
r'(?P<filename>{filename_slug})$'.format(**pattern_opts)),
name='redirect_project_slug',
),
url(
(
r'^(?:|projects/(?P<subproject_slug>{project_slug})/)'
r'(?P<lang_slug>{lang_slug})/'
r'(?P<version_slug>{version_slug})/'
r'(?P<filename>{filename_slug})$'.format(**pattern_opts)
),
serve_docs,
name='docs_detail'),
name='docs_detail',
),
]
groups = [subdomain_urls]
# Needed to serve media locally
if getattr(settings, 'DEBUG', False):
groups.insert(0, static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT))
groups.insert(
0,
static(
settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT,
),
)
urlpatterns = reduce(add, groups)

View File

@ -11,7 +11,6 @@ import os
import re
from django.conf import settings
from django.utils import six
from django.utils.functional import allow_lazy
from django.utils.safestring import SafeText, mark_safe
from django.utils.text import slugify as slugify_base
@ -20,7 +19,6 @@ from celery import group, chord
from readthedocs.builds.constants import LATEST, BUILD_STATE_TRIGGERED
from readthedocs.doc_builder.constants import DOCKER_LIMITS
log = logging.getLogger(__name__)
SYNC_USER = getattr(settings, 'SYNC_USER', getpass.getuser())
@ -221,7 +219,7 @@ def slugify(value, *args, **kwargs):
return value
slugify = allow_lazy(slugify, six.text_type, SafeText)
slugify = allow_lazy(slugify, str, SafeText)
def safe_makedirs(directory_name):

View File

@ -1,11 +1,11 @@
# -*- coding: utf-8 -*-
"""Patterns for extending Read the Docs."""
from __future__ import absolute_import
import inspect
from django.conf import settings
from django.utils.module_loading import import_string
import six
def get_override_class(proxy_class, default_class=None):
@ -21,7 +21,7 @@ def get_override_class(proxy_class, default_class=None):
default_class = getattr(proxy_class, '_default_class')
class_id = '.'.join([
inspect.getmodule(proxy_class).__name__,
proxy_class.__name__
proxy_class.__name__,
])
class_path = getattr(settings, 'CLASS_OVERRIDES', {}).get(class_id)
# pylint: disable=protected-access
@ -34,14 +34,18 @@ def get_override_class(proxy_class, default_class=None):
class SettingsOverrideMeta(type):
"""Meta class for passing along classmethod class to the underlying class.""" # noqa
"""
Meta class to manage our Setting configurations.
Meta class for passing along classmethod class to the underlying class.
"""
def __getattr__(cls, attr): # noqa: pep8 false positive
proxy_class = get_override_class(cls, getattr(cls, '_default_class'))
return getattr(proxy_class, attr)
class SettingsOverrideObject(six.with_metaclass(SettingsOverrideMeta, object)):
class SettingsOverrideObject(metaclass=SettingsOverrideMeta):
"""
Base class for creating class that can be overridden.
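
A standalone sketch (not from this diff) of how the override mechanism above is meant to be consumed, assuming a configured Django settings module and that this file lives at `readthedocs.core.utils.extend`; the `Notifier` classes are invented for illustration.

from readthedocs.core.utils.extend import SettingsOverrideObject


class DefaultNotifier:

    """Fallback implementation used when no override is configured."""

    @classmethod
    def notify(cls, message):
        print('default notifier:', message)


class Notifier(SettingsOverrideObject):
    _default_class = DefaultNotifier


# Attribute access falls through the metaclass: with no matching entry in
# settings.CLASS_OVERRIDES this resolves to DefaultNotifier.notify; a dotted
# path in CLASS_OVERRIDES would swap the implementation without touching
# call sites.
Notifier.notify('build finished')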

View File

@ -1,4 +1,6 @@
"""Common task exports"""
# -*- coding: utf-8 -*-
"""Common task exports."""
from .permission_checks import user_id_matches # noqa for unused import
from .public import PublicTask # noqa

View File

@ -1,4 +1,6 @@
"""Permission checks for tasks"""
# -*- coding: utf-8 -*-
"""Permission checks for tasks."""
__all__ = ('user_id_matches',)

View File

@ -1,21 +1,18 @@
"""Celery tasks with publicly viewable status"""
# -*- coding: utf-8 -*-
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
"""Celery tasks with publicly viewable status."""
from celery import Task, states
from django.conf import settings
from .retrieve import TaskNotFound, get_task_data
__all__ = (
'PublicTask', 'TaskNoPermission', 'get_public_task_data'
)
__all__ = (
'PublicTask',
'TaskNoPermission',
'get_public_task_data',
)
STATUS_UPDATES_ENABLED = not getattr(settings, 'CELERY_ALWAYS_EAGER', False)
@ -51,7 +48,7 @@ class PublicTask(Task):
def set_permission_context(self, context):
"""
Set data that can be used by ``check_permission`` to authorize a
Set data that can be used by ``check_permission`` to authorize a.
request for the this task. By default it will be the ``kwargs`` passed
into the task.
@ -109,22 +106,26 @@ class PublicTask(Task):
def my_public_task(user_id):
pass
"""
def decorator(func):
func.check_permission = check
return func
return decorator
class TaskNoPermission(Exception):
def __init__(self, task_id, *args, **kwargs):
message = 'No permission to access task with id {id}'.format(
id=task_id)
super(TaskNoPermission, self).__init__(message, *args, **kwargs)
id=task_id,
)
super().__init__(message, *args, **kwargs)
def get_public_task_data(request, task_id):
"""
Return task details as tuple
Return task details as tuple.
Will raise `TaskNoPermission` if `request` has no permission to access info
of the task with id `task_id`. This is also the case of no task with the

View File

@ -1,27 +1,24 @@
"""Utilities for retrieving task data."""
# -*- coding: utf-8 -*-
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
"""Utilities for retrieving task data."""
from celery import states
from celery.result import AsyncResult
__all__ = ('TaskNotFound', 'get_task_data')
class TaskNotFound(Exception):
def __init__(self, task_id, *args, **kwargs):
message = 'No public task found with id {id}'.format(id=task_id)
super(TaskNotFound, self).__init__(message, *args, **kwargs)
super().__init__(message, *args, **kwargs)
def get_task_data(task_id):
"""
Will raise `TaskNotFound` if the task is in state ``PENDING`` or the task
Will raise `TaskNotFound` if the task is in state ``PENDING`` or the task.
meta data has no ``'task_name'`` key set.
"""

View File

@ -35,7 +35,7 @@ class HomepageView(TemplateView):
def get_context_data(self, **kwargs):
"""Add latest builds and featured projects."""
context = super(HomepageView, self).get_context_data(**kwargs)
context = super().get_context_data(**kwargs)
context['featured_list'] = Project.objects.filter(featured=True)
context['projects_count'] = Project.objects.count()
return context
@ -45,7 +45,7 @@ class SupportView(TemplateView):
template_name = 'support.html'
def get_context_data(self, **kwargs):
context = super(SupportView, self).get_context_data(**kwargs)
context = super().get_context_data(**kwargs)
support_email = getattr(settings, 'SUPPORT_EMAIL', None)
if not support_email:
support_email = 'support@{domain}'.format(
@ -133,13 +133,15 @@ def do_not_track(request):
dnt_header = request.META.get('HTTP_DNT')
# https://w3c.github.io/dnt/drafts/tracking-dnt.html#status-representation
return JsonResponse({ # pylint: disable=redundant-content-type-for-json-response
'policy': 'https://docs.readthedocs.io/en/latest/privacy-policy.html',
'same-party': [
'readthedocs.org',
'readthedocs.com',
'readthedocs.io', # .org Documentation Sites
'readthedocs-hosted.com', # .com Documentation Sites
],
'tracking': 'N' if dnt_header == '1' else 'T',
}, content_type='application/tracking-status+json')
return JsonResponse( # pylint: disable=redundant-content-type-for-json-response
{
'policy': 'https://docs.readthedocs.io/en/latest/privacy-policy.html',
'same-party': [
'readthedocs.org',
'readthedocs.com',
'readthedocs.io', # .org Documentation Sites
'readthedocs-hosted.com', # .com Documentation Sites
],
'tracking': 'N' if dnt_header == '1' else 'T',
}, content_type='application/tracking-status+json',
)

View File

@ -1,11 +1,6 @@
"""Views pertaining to builds."""
# -*- coding: utf-8 -*-
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
"""Views pertaining to builds."""
import json
import logging
@ -21,6 +16,7 @@ from readthedocs.projects import constants
from readthedocs.projects.models import Feature, Project
from readthedocs.projects.tasks import sync_repository_task
log = logging.getLogger(__name__)
@ -47,13 +43,14 @@ def _build_version(project, slug, already_built=()):
version = project.versions.filter(active=True, slug=slug).first()
if version and slug not in already_built:
log.info(
"(Version build) Building %s:%s",
project.slug, version.slug,
'(Version build) Building %s:%s',
project.slug,
version.slug,
)
trigger_build(project=project, version=version, force=True)
return slug
log.info("(Version build) Not Building %s", slug)
log.info('(Version build) Not Building %s', slug)
return None
@ -70,8 +67,11 @@ def build_branches(project, branch_list):
for branch in branch_list:
versions = project.versions_from_branch_name(branch)
for version in versions:
log.info("(Branch Build) Processing %s:%s",
project.slug, version.slug)
log.info(
'(Branch Build) Processing %s:%s',
project.slug,
version.slug,
)
ret = _build_version(project, version.slug, already_built=to_build)
if ret:
to_build.add(ret)
@ -95,9 +95,9 @@ def sync_versions(project):
try:
version_identifier = project.get_default_branch()
version = (
project.versions
.filter(identifier=version_identifier)
.first()
project.versions.filter(
identifier=version_identifier,
).first()
)
if not version:
log.info('Unable to sync from %s version', version_identifier)
@ -120,10 +120,13 @@ def get_project_from_url(url):
def log_info(project, msg):
log.info(constants.LOG_TEMPLATE
.format(project=project,
version='',
msg=msg))
log.info(
constants.LOG_TEMPLATE.format(
project=project,
version='',
msg=msg,
),
)
def _build_url(url, projects, branches):
@ -133,7 +136,7 @@ def _build_url(url, projects, branches):
Check each of the ``branches`` to see if they are active and should be
built.
"""
ret = ""
ret = ''
all_built = {}
all_not_building = {}
@ -156,15 +159,19 @@ def _build_url(url, projects, branches):
for project_slug, built in list(all_built.items()):
if built:
msg = '(URL Build) Build Started: %s [%s]' % (
url, ' '.join(built))
msg = '(URL Build) Build Started: {} [{}]'.format(
url,
' '.join(built),
)
log_info(project_slug, msg=msg)
ret += msg
for project_slug, not_building in list(all_not_building.items()):
if not_building:
msg = '(URL Build) Not Building: %s [%s]' % (
url, ' '.join(not_building))
msg = '(URL Build) Not Building: {} [{}]'.format(
url,
' '.join(not_building),
)
log_info(project_slug, msg=msg)
ret += msg
@ -211,14 +218,14 @@ def github_build(request): # noqa: D205
log.info(
'GitHub webhook search: url=%s branches=%s',
http_search_url,
branches
branches,
)
ssh_projects = get_project_from_url(ssh_search_url)
if ssh_projects:
log.info(
'GitHub webhook search: url=%s branches=%s',
ssh_search_url,
branches
branches,
)
projects = repo_projects | ssh_projects
return _build_url(http_search_url, projects, branches)
@ -293,24 +300,24 @@ def bitbucket_build(request):
else:
data = json.loads(request.body)
version = 2 if request.META.get('HTTP_USER_AGENT') == 'Bitbucket-Webhooks/2.0' else 1
version = 2 if request.META.get('HTTP_USER_AGENT') == 'Bitbucket-Webhooks/2.0' else 1 # yapf: disabled # noqa
if version == 1:
branches = [commit.get('branch', '')
for commit in data['commits']]
branches = [
commit.get('branch', '') for commit in data['commits']
]
repository = data['repository']
if not repository['absolute_url']:
return HttpResponse('Invalid request', status=400)
search_url = 'bitbucket.org{0}'.format(
repository['absolute_url'].rstrip('/')
search_url = 'bitbucket.org{}'.format(
repository['absolute_url'].rstrip('/'),
)
elif version == 2:
changes = data['push']['changes']
branches = [change['new']['name']
for change in changes]
branches = [change['new']['name'] for change in changes]
if not data['repository']['full_name']:
return HttpResponse('Invalid request', status=400)
search_url = 'bitbucket.org/{0}'.format(
data['repository']['full_name']
search_url = 'bitbucket.org/{}'.format(
data['repository']['full_name'],
)
except (TypeError, ValueError, KeyError):
log.exception('Invalid Bitbucket webhook payload')
@ -358,10 +365,12 @@ def generic_build(request, project_id_or_slug=None):
project = Project.objects.get(slug=project_id_or_slug)
except (Project.DoesNotExist, ValueError):
log.exception(
"(Incoming Generic Build) Repo not found: %s",
project_id_or_slug)
'(Incoming Generic Build) Repo not found: %s',
project_id_or_slug,
)
return HttpResponseNotFound(
'Repo not found: %s' % project_id_or_slug)
'Repo not found: %s' % project_id_or_slug,
)
# This endpoint doesn't require authorization, we shouldn't allow builds to
# be triggered from this any longer. Deprecation plan is to selectively
# allow access to this endpoint for now.
@ -370,11 +379,11 @@ def generic_build(request, project_id_or_slug=None):
if request.method == 'POST':
slug = request.POST.get('version_slug', project.default_version)
log.info(
"(Incoming Generic Build) %s [%s]",
'(Incoming Generic Build) %s [%s]',
project.slug,
slug,
)
_build_version(project, slug)
else:
return HttpResponse("You must POST to this resource.")
return HttpResponse('You must POST to this resource.')
return redirect('builds_project_list', project.slug)
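
A standalone sketch of the branch extraction the Bitbucket webhook view above performs: the payload version is inferred from the User-Agent header, and branch names are read from the matching shape. The payloads below are trimmed illustrations, not real webhook bodies.

def bitbucket_branches(data, user_agent):
    version = 2 if user_agent == 'Bitbucket-Webhooks/2.0' else 1
    if version == 1:
        return [commit.get('branch', '') for commit in data['commits']]
    return [change['new']['name'] for change in data['push']['changes']]


v1_payload = {'commits': [{'branch': 'master'}]}
v2_payload = {'push': {'changes': [{'new': {'name': 'master'}}]}}
print(bitbucket_branches(v1_payload, ''))                        # ['master']
print(bitbucket_branches(v2_payload, 'Bitbucket-Webhooks/2.0'))  # ['master']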

View File

@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
"""
Doc serving from Python.
@ -25,18 +26,14 @@ PYTHON_MEDIA (False) - Set this to True to serve docs & media from Python
SERVE_DOCS (['private']) - The list of ['private', 'public'] docs to serve.
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
import logging
import mimetypes
import os
from functools import wraps
from django.conf import settings
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import get_object_or_404
from django.shortcuts import render
from django.http import Http404, HttpResponse, HttpResponseRedirect
from django.shortcuts import get_object_or_404, render
from django.utils.encoding import iri_to_uri
from django.views.static import serve
@ -47,6 +44,7 @@ from readthedocs.core.symlink import PrivateSymlink, PublicSymlink
from readthedocs.projects import constants
from readthedocs.projects.models import Project, ProjectRelationship
log = logging.getLogger(__name__)
@ -58,8 +56,11 @@ def map_subproject_slug(view_func):
.. warning:: Does not take into account any kind of privacy settings.
"""
@wraps(view_func)
def inner_view(request, subproject=None, subproject_slug=None, *args, **kwargs): # noqa
def inner_view( # noqa
request, subproject=None, subproject_slug=None, *args, **kwargs,
):
if subproject is None and subproject_slug:
# Try to fetch by subproject alias first, otherwise we might end up
# redirected to an unrelated project.
@ -85,8 +86,11 @@ def map_project_slug(view_func):
.. warning:: Does not take into account any kind of privacy settings.
"""
@wraps(view_func)
def inner_view(request, project=None, project_slug=None, *args, **kwargs): # noqa
def inner_view( # noqa
request, project=None, project_slug=None, *args, **kwargs
):
if project is None:
if not project_slug:
project_slug = request.slug
@ -111,13 +115,14 @@ def redirect_project_slug(request, project, subproject): # pylint: disable=unus
def redirect_page_with_filename(request, project, subproject, filename): # pylint: disable=unused-argument # noqa
"""Redirect /page/file.html to /en/latest/file.html."""
return HttpResponseRedirect(
resolve(subproject or project, filename=filename))
resolve(subproject or project, filename=filename),
)
def _serve_401(request, project):
res = render(request, '401.html')
res.status_code = 401
log.debug('Unauthorized access to {0} documentation'.format(project.slug))
log.debug('Unauthorized access to {} documentation'.format(project.slug))
return res
@ -129,7 +134,8 @@ def _serve_file(request, filename, basepath):
# Serve from Nginx
content_type, encoding = mimetypes.guess_type(
os.path.join(basepath, filename))
os.path.join(basepath, filename),
)
content_type = content_type or 'application/octet-stream'
response = HttpResponse(content_type=content_type)
if encoding:
@ -155,9 +161,14 @@ def _serve_file(request, filename, basepath):
@map_project_slug
@map_subproject_slug
def serve_docs(
request, project, subproject, lang_slug=None, version_slug=None,
filename=''):
"""Exists to map existing proj, lang, version, filename views to the file format."""
request,
project,
subproject,
lang_slug=None,
version_slug=None,
filename='',
):
"""Map existing proj, lang, version, filename views to the file format."""
if not version_slug:
version_slug = project.get_default_version()
try:
@ -222,7 +233,8 @@ def _serve_symlink_docs(request, project, privacy_level, filename=''):
files_tried.append(os.path.join(basepath, filename))
raise Http404(
'File not found. Tried these files: %s' % ','.join(files_tried))
'File not found. Tried these files: %s' % ','.join(files_tried),
)
@map_project_slug

View File

@ -1,11 +1,10 @@
# -*- coding: utf-8 -*-
"""
MkDocs_ backend for building docs.
.. _MkDocs: http://www.mkdocs.org/
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
import json
import logging
import os
@ -18,6 +17,7 @@ from readthedocs.doc_builder.base import BaseBuilder
from readthedocs.doc_builder.exceptions import MkDocsYAMLParseError
from readthedocs.projects.models import Feature
log = logging.getLogger(__name__)
@ -44,10 +44,11 @@ class BaseMkdocs(BaseBuilder):
DEFAULT_THEME_NAME = 'mkdocs'
def __init__(self, *args, **kwargs):
super(BaseMkdocs, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self.old_artifact_path = os.path.join(
self.version.project.checkout_path(self.version.slug),
self.build_dir)
self.build_dir,
)
self.root_path = self.version.project.checkout_path(self.version.slug)
self.yaml_file = self.get_yaml_config()
@ -67,14 +68,13 @@ class BaseMkdocs(BaseBuilder):
else:
self.DEFAULT_THEME_NAME = 'mkdocs'
def get_yaml_config(self):
"""Find the ``mkdocs.yml`` file in the project root."""
mkdoc_path = self.config.mkdocs.configuration
if not mkdoc_path:
mkdoc_path = os.path.join(
self.project.checkout_path(self.version.slug),
'mkdocs.yml'
'mkdocs.yml',
)
if not os.path.exists(mkdoc_path):
return None
@ -87,9 +87,7 @@ class BaseMkdocs(BaseBuilder):
:raises: ``MkDocsYAMLParseError`` if failed due to syntax errors.
"""
try:
return yaml.safe_load(
open(self.yaml_file, 'r')
)
return yaml.safe_load(open(self.yaml_file, 'r'),)
except IOError:
return {
'site_name': self.version.project.name,
@ -98,10 +96,13 @@ class BaseMkdocs(BaseBuilder):
note = ''
if hasattr(exc, 'problem_mark'):
mark = exc.problem_mark
note = ' (line %d, column %d)' % (mark.line + 1, mark.column + 1)
note = ' (line %d, column %d)' % (
mark.line + 1,
mark.column + 1,
)
raise MkDocsYAMLParseError(
'Your mkdocs.yml could not be loaded, '
'possibly due to a syntax error{note}'.format(note=note)
'possibly due to a syntax error{note}'.format(note=note),
)
def append_conf(self, **__):
@ -153,13 +154,13 @@ class BaseMkdocs(BaseBuilder):
# of the mkdocs configuration file.
docs_path = os.path.join(
os.path.dirname(self.yaml_file),
docs_dir
docs_dir,
)
# RTD javascript writing
rtd_data = self.generate_rtd_data(
docs_dir=os.path.relpath(docs_path, self.root_path),
mkdocs_config=user_config
mkdocs_config=user_config,
)
with open(os.path.join(docs_path, 'readthedocs-data.js'), 'w') as f:
f.write(rtd_data)
@ -178,7 +179,7 @@ class BaseMkdocs(BaseBuilder):
# Write the modified mkdocs configuration
yaml.safe_dump(
user_config,
open(self.yaml_file, 'w')
open(self.yaml_file, 'w'),
)
# Write the mkdocs.yml to the build logs
@ -205,13 +206,21 @@ class BaseMkdocs(BaseBuilder):
'programming_language': self.version.project.programming_language,
'page': None,
'theme': self.get_theme_name(mkdocs_config),
'builder': "mkdocs",
'builder': 'mkdocs',
'docroot': docs_dir,
'source_suffix': ".md",
'api_host': getattr(settings, 'PUBLIC_API_URL', 'https://readthedocs.org'),
'source_suffix': '.md',
'api_host': getattr(
settings,
'PUBLIC_API_URL',
'https://readthedocs.org',
),
'ad_free': not self.project.show_advertising,
'commit': self.version.project.vcs_repo(self.version.slug).commit,
'global_analytics_code': getattr(settings, 'GLOBAL_ANALYTICS_CODE', 'UA-17997319-1'),
'global_analytics_code': getattr(
settings,
'GLOBAL_ANALYTICS_CODE',
'UA-17997319-1',
),
'user_analytics_code': analytics_code,
}
data_json = json.dumps(readthedocs_data, indent=4)
@ -232,21 +241,22 @@ class BaseMkdocs(BaseBuilder):
self.python_env.venv_bin(filename='mkdocs'),
self.builder,
'--clean',
'--site-dir', self.build_dir,
'--config-file', self.yaml_file,
'--site-dir',
self.build_dir,
'--config-file',
self.yaml_file,
]
if self.config.mkdocs.fail_on_warning:
build_command.append('--strict')
cmd_ret = self.run(
*build_command,
cwd=checkout_path,
*build_command, cwd=checkout_path,
bin_path=self.python_env.venv_bin()
)
return cmd_ret.successful
def get_theme_name(self, mkdocs_config):
"""
Get the theme configuration in the mkdocs_config
Get the theme configuration in the mkdocs_config.
In v0.17.0, the theme configuration switched
from two separate configs (both optional) to a nested directive.
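
The docstring above refers to the mkdocs 0.17 change in theme configuration; below is a small sketch of the two YAML shapes involved and how a name can be read from either. This illustrates the config shapes only, not the method's actual implementation.

import yaml

old_style = yaml.safe_load('theme: readthedocs')           # pre-0.17: plain string
new_style = yaml.safe_load('theme:\n  name: readthedocs')  # 0.17+: nested directive


def theme_name(config, default='mkdocs'):
    theme = config.get('theme')
    if isinstance(theme, dict):
        return theme.get('name') or default
    return theme or default


print(theme_name(old_style), theme_name(new_style))  # readthedocs readthedocs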

View File

@ -1,30 +1,27 @@
# -*- coding: utf-8 -*-
"""
Sphinx_ backend for building docs.
.. _Sphinx: http://www.sphinx-doc.org/
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
import codecs
import shutil
import logging
import os
import shutil
import sys
import zipfile
from glob import glob
import six
from django.conf import settings
from django.template import loader as template_loader
from django.template.loader import render_to_string
from readthedocs.builds import utils as version_utils
from readthedocs.projects.exceptions import ProjectConfigurationError
from readthedocs.projects.models import Feature
from readthedocs.projects.utils import safe_write
from readthedocs.restapi.client import api
from readthedocs.projects.models import Feature
from ..base import BaseBuilder, restoring_chdir
from ..constants import PDF_RE
@ -32,6 +29,7 @@ from ..environments import BuildCommand, DockerBuildCommand
from ..exceptions import BuildEnvironmentError
from ..signals import finalize_sphinx_context_data
log = logging.getLogger(__name__)
@ -40,14 +38,14 @@ class BaseSphinx(BaseBuilder):
"""The parent for most sphinx builders."""
def __init__(self, *args, **kwargs):
super(BaseSphinx, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self.config_file = self.config.sphinx.configuration
try:
if not self.config_file:
self.config_file = self.project.conf_file(self.version.slug)
self.old_artifact_path = os.path.join(
os.path.dirname(self.config_file),
self.sphinx_build_dir
self.sphinx_build_dir,
)
except ProjectConfigurationError:
docs_dir = self.docs_dir()
@ -60,11 +58,13 @@ class BaseSphinx(BaseBuilder):
"""Create ``conf.py`` if it doesn't exist."""
docs_dir = self.docs_dir()
conf_template = render_to_string(
'sphinx/conf.py.conf', {
'sphinx/conf.py.conf',
{
'project': self.project,
'version': self.version,
'master_doc': master_doc,
})
},
)
conf_file = os.path.join(docs_dir, 'conf.py')
safe_write(conf_file, conf_template)
@ -76,25 +76,28 @@ class BaseSphinx(BaseBuilder):
os.path.dirname(
os.path.relpath(
self.config_file,
self.project.checkout_path(self.version.slug)
)
self.project.checkout_path(self.version.slug),
),
),
'',
)
remote_version = self.version.commit_name
github_user, github_repo = version_utils.get_github_username_repo(
url=self.project.repo)
url=self.project.repo,
)
github_version_is_editable = (self.version.type == 'branch')
display_github = github_user is not None
bitbucket_user, bitbucket_repo = version_utils.get_bitbucket_username_repo( # noqa
url=self.project.repo)
url=self.project.repo,
)
bitbucket_version_is_editable = (self.version.type == 'branch')
display_bitbucket = bitbucket_user is not None
gitlab_user, gitlab_repo = version_utils.get_gitlab_username_repo(
url=self.project.repo)
url=self.project.repo,
)
gitlab_version_is_editable = (self.version.type == 'branch')
display_gitlab = gitlab_user is not None
@ -146,7 +149,7 @@ class BaseSphinx(BaseBuilder):
# Features
'dont_overwrite_sphinx_context': self.project.has_feature(
Feature.DONT_OVERWRITE_SPHINX_CONTEXT
Feature.DONT_OVERWRITE_SPHINX_CONTEXT,
),
}
@ -159,26 +162,25 @@ class BaseSphinx(BaseBuilder):
return data
def append_conf(self, **__):
"""Find or create a ``conf.py`` with a rendered ``doc_builder/conf.py.tmpl`` appended"""
"""
Find or create a ``conf.py`` and appends default content.
The default content is rendered from ``doc_builder/conf.py.tmpl``.
"""
if self.config_file is None:
master_doc = self.create_index(extension='rst')
self._write_config(master_doc=master_doc)
try:
self.config_file = (
self.config_file or
self.project.conf_file(self.version.slug)
self.config_file or self.project.conf_file(self.version.slug)
)
outfile = codecs.open(self.config_file, encoding='utf-8', mode='a')
except (ProjectConfigurationError, IOError):
trace = sys.exc_info()[2]
six.reraise(
ProjectConfigurationError,
ProjectConfigurationError(
ProjectConfigurationError.NOT_FOUND
),
trace
)
raise ProjectConfigurationError(
ProjectConfigurationError.NOT_FOUND,
).with_traceback(trace)
# Append config to project conf file
tmpl = template_loader.get_template('doc_builder/conf.py.tmpl')
@ -222,8 +224,7 @@ class BaseSphinx(BaseBuilder):
self.sphinx_build_dir,
])
cmd_ret = self.run(
*build_command,
cwd=os.path.dirname(self.config_file),
*build_command, cwd=os.path.dirname(self.config_file),
bin_path=self.python_env.venv_bin()
)
return cmd_ret.successful
@ -234,18 +235,19 @@ class HtmlBuilder(BaseSphinx):
sphinx_build_dir = '_build/html'
def __init__(self, *args, **kwargs):
super(HtmlBuilder, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self.sphinx_builder = 'readthedocs'
def move(self, **__):
super(HtmlBuilder, self).move()
super().move()
# Copy JSON artifacts to its own directory
# to keep compatibility with the older builder.
json_path = os.path.abspath(
os.path.join(self.old_artifact_path, '..', 'json')
os.path.join(self.old_artifact_path, '..', 'json'),
)
json_path_target = self.project.artifact_path(
version=self.version.slug, type_='sphinx_search'
version=self.version.slug,
type_='sphinx_search',
)
if os.path.exists(json_path):
if os.path.exists(json_path_target):
@ -253,19 +255,17 @@ class HtmlBuilder(BaseSphinx):
log.info('Copying json on the local filesystem')
shutil.copytree(
json_path,
json_path_target
json_path_target,
)
else:
log.warning(
'Not moving json because the build dir is unknown.'
)
log.warning('Not moving json because the build dir is unknown.',)
class HtmlDirBuilder(HtmlBuilder):
type = 'sphinx_htmldir'
def __init__(self, *args, **kwargs):
super(HtmlDirBuilder, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self.sphinx_builder = 'readthedocsdirhtml'
@ -273,7 +273,7 @@ class SingleHtmlBuilder(HtmlBuilder):
type = 'sphinx_singlehtml'
def __init__(self, *args, **kwargs):
super(SingleHtmlBuilder, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self.sphinx_builder = 'readthedocssinglehtml'
@ -304,7 +304,8 @@ class LocalMediaBuilder(BaseSphinx):
filename=to_write,
arcname=os.path.join(
'{}-{}'.format(self.project.slug, self.version.slug),
to_write),
to_write,
),
)
archive.close()
@ -338,7 +339,7 @@ class LatexBuildCommand(BuildCommand):
"""Ignore LaTeX exit code if there was file output."""
def run(self):
super(LatexBuildCommand, self).run()
super().run()
# Force LaTeX exit code to be a little more optimistic. If LaTeX
# reports an output file, let's just assume we're fine.
if PDF_RE.search(self.output):
@ -350,7 +351,7 @@ class DockerLatexBuildCommand(DockerBuildCommand):
"""Ignore LaTeX exit code if there was file output."""
def run(self):
super(DockerLatexBuildCommand, self).run()
super().run()
# Force LaTeX exit code to be a little more optimistic. If LaTeX
# reports an output file, let's just assume we're fine.
if PDF_RE.search(self.output):
@ -393,11 +394,16 @@ class PdfBuilder(BaseSphinx):
# Run LaTeX -> PDF conversions
pdflatex_cmds = [
['pdflatex', '-interaction=nonstopmode', tex_file]
for tex_file in tex_files] # yapf: disable
for tex_file in tex_files
] # yapf: disable
makeindex_cmds = [
['makeindex', '-s', 'python.ist', '{0}.idx'.format(
os.path.splitext(os.path.relpath(tex_file, latex_cwd))[0])]
for tex_file in tex_files] # yapf: disable
[
'makeindex', '-s', 'python.ist', '{}.idx'.format(
os.path.splitext(os.path.relpath(tex_file, latex_cwd))[0],
),
]
for tex_file in tex_files
] # yapf: disable
if self.build_env.command_class == DockerBuildCommand:
latex_class = DockerLatexBuildCommand
@ -406,15 +412,27 @@ class PdfBuilder(BaseSphinx):
pdf_commands = []
for cmd in pdflatex_cmds:
cmd_ret = self.build_env.run_command_class(
cls=latex_class, cmd=cmd, cwd=latex_cwd, warn_only=True)
cls=latex_class,
cmd=cmd,
cwd=latex_cwd,
warn_only=True,
)
pdf_commands.append(cmd_ret)
for cmd in makeindex_cmds:
cmd_ret = self.build_env.run_command_class(
cls=latex_class, cmd=cmd, cwd=latex_cwd, warn_only=True)
cls=latex_class,
cmd=cmd,
cwd=latex_cwd,
warn_only=True,
)
pdf_commands.append(cmd_ret)
for cmd in pdflatex_cmds:
cmd_ret = self.build_env.run_command_class(
cls=latex_class, cmd=cmd, cwd=latex_cwd, warn_only=True)
cls=latex_class,
cmd=cmd,
cwd=latex_cwd,
warn_only=True,
)
pdf_match = PDF_RE.search(cmd_ret.output)
if pdf_match:
self.pdf_file_name = pdf_match.group(1).strip()
@ -448,7 +466,9 @@ class PdfBuilder(BaseSphinx):
from_file = None
if from_file:
to_file = os.path.join(
self.target, '{}.pdf'.format(self.project.slug))
self.target,
'{}.pdf'.format(self.project.slug),
)
self.run(
'mv',
'-f',

View File

@ -1,15 +1,13 @@
# -*- coding: utf-8 -*-
"""Base classes for Builders."""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
"""Base classes for Builders."""
import logging
import os
import shutil
from builtins import object
from functools import wraps
log = logging.getLogger(__name__)
@ -26,7 +24,7 @@ def restoring_chdir(fn):
return decorator
class BaseBuilder(object):
class BaseBuilder:
"""
The Base for all Builders. Defines the API for subclasses.
@ -49,7 +47,9 @@ class BaseBuilder(object):
self.config = python_env.config if python_env else None
self._force = force
self.target = self.project.artifact_path(
version=self.version.slug, type_=self.type)
version=self.version.slug,
type_=self.type,
)
def force(self, **__):
"""An optional step to force a build even when nothing has changed."""
@ -70,7 +70,7 @@ class BaseBuilder(object):
shutil.copytree(
self.old_artifact_path,
self.target,
ignore=shutil.ignore_patterns(*self.ignore_patterns)
ignore=shutil.ignore_patterns(*self.ignore_patterns),
)
else:
log.warning('Not moving docs, because the build dir is unknown.')
@ -99,10 +99,14 @@ class BaseBuilder(object):
docs_dir = self.docs_dir()
index_filename = os.path.join(
docs_dir, 'index.{ext}'.format(ext=extension))
docs_dir,
'index.{ext}'.format(ext=extension),
)
if not os.path.exists(index_filename):
readme_filename = os.path.join(
docs_dir, 'README.{ext}'.format(ext=extension))
docs_dir,
'README.{ext}'.format(ext=extension),
)
if os.path.exists(readme_filename):
return 'README'
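
A standalone sketch of the index/README fallback visible in the hunk above; the directory path is illustrative, and what happens when neither file exists is outside this hunk, so the final branch below is an assumption.

import os


def detect_master_doc(docs_dir, extension='rst'):
    """Prefer index.<ext>, fall back to README.<ext>."""
    if os.path.exists(os.path.join(docs_dir, 'index.{ext}'.format(ext=extension))):
        return 'index'
    if os.path.exists(os.path.join(docs_dir, 'README.{ext}'.format(ext=extension))):
        return 'README'
    # The missing-file case is handled outside this hunk; a default index
    # is assumed to be generated there.
    return 'index'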

Some files were not shown because too many files have changed in this diff