summaryrefslogtreecommitdiff
path: root/crocoite/task.py
blob: e93cfde4e9a7e5baedfd854d9a1f897e40015627 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
# Copyright (c) 2017–2018 crocoite contributors
# 
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# 
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# 
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

"""
Celery distributed tasks
"""

import os, logging

from urllib.parse import urlsplit
from datetime import datetime
from operator import attrgetter
from itertools import chain

def _monkeyPatchSyncTasks ():
    """ Work around celery: result sets don’t support the disable_sync_subtasks argument """
    import celery.result

    def _neverBlocks ():
        # No-op stand-in so calling .get () from within a task does not raise.
        pass

    celery.result.assert_will_not_block = _neverBlocks

_monkeyPatchSyncTasks ()
from celery import Celery
from celery.utils.log import get_task_logger

from .browser import ChromeService
from .controller import SinglePageController, ControllerSettings, RecursiveController, defaultSettings, DepthLimit, PrefixLimit
from . import behavior
from .cli import parseRecursive

# Celery application for distributed archiving; broker/backend and the
# task-specific keys (warc_filename, temp_dir, finished_dir) are read from
# the external `celeryconfig` module.
app = Celery ('crocoite.distributed')
app.config_from_object('celeryconfig')
# Route the two task types to dedicated queues so archive workers and
# controller workers can be scaled independently.
app.conf.task_routes = {
        'crocoite.task.archive': {'queue': 'crocoite.archive'},
        'crocoite.task.controller': {'queue': 'crocoite.controller'},
        # <method>.chunks is actually a starmap job
        'celery.starmap': {'queue': 'crocoite.archive'},
        }
app.conf.task_default_queue = 'crocoite.default'
# disable prefetching, since our tasks usually run for a _very_ long time
app.conf.worker_prefetch_multiplier = 1
logger = get_task_logger('crocoite.distributed.archive')

@app.task(bind=True, track_started=True)
def archive (self, url, settings, enabledBehaviorNames):
    """
    Archive a single URL

    :param url: URL to archive
    :param settings: dict of keyword arguments for ControllerSettings
    :param enabledBehaviorNames: names of behavior scripts to enable
    :returns: whatever SinglePageController.run () returns

    Supports these config keys (celeryconfig):

    warc_filename = '{domain}-{date}-{id}.warc.gz'
    temp_dir = '/tmp/'
    finished_dir = '/tmp/finished'
    """

    parsedUrl = urlsplit (url)
    # NOTE(review): parsedUrl.hostname is None for URLs without a netloc —
    # presumably callers only pass http(s) URLs; confirm upstream validation.
    outFile = app.conf.warc_filename.format (
                    id=self.request.root_id,
                    domain=parsedUrl.hostname.replace ('/', '-'),
                    date=datetime.utcnow ().isoformat (),
                    )
    outPath = os.path.join (app.conf.temp_dir, outFile)

    enabledBehavior = list (filter (lambda x: x.name in enabledBehaviorNames, behavior.available))
    settings = ControllerSettings (**settings)
    # Use a context manager so the WARC file descriptor is closed even when
    # the controller raises; the original left the handle open forever.
    with open (outPath, 'wb') as fd:
        controller = SinglePageController (url, fd, behavior=enabledBehavior, settings=settings)
        ret = controller.run ()

    # Move the finished WARC out of the temp dir so pickup scripts only ever
    # see fully-written files.
    os.makedirs (app.conf.finished_dir, exist_ok=True)
    os.rename (outPath, os.path.join (app.conf.finished_dir, outFile))

    return ret

class DistributedRecursiveController (RecursiveController):
    """ Distributed, recursive controller using celery """

    # Sentinel distinguishing "argument omitted" from an explicit None
    # (the controller task passes None on purpose).
    _serviceNotSet = object ()

    def __init__ (self, url, service=_serviceNotSet, behavior=behavior.available, \
            logger=logging.getLogger(__name__), settings=defaultSettings,
            recursionPolicy=DepthLimit (0), concurrency=1):
        """
        :param url: start URL
        :param service: browser service; when omitted a fresh ChromeService is
            created per instance. An explicit None is passed through unchanged.
        :param concurrency: number of archive tasks run in parallel per chunk
        """
        # The previous default ``service=ChromeService ()`` was evaluated once
        # at definition time, so every instance using the default shared one
        # ChromeService object (mutable default argument pitfall). Create it
        # lazily instead.
        if service is self._serviceNotSet:
            service = ChromeService ()
        super ().__init__ (url, None, service, behavior, logger, settings, recursionPolicy)
        self.concurrency = concurrency

    def fetch (self, urls):
        """ Fan the given URLs out as celery archive tasks and return the
        chained per-URL results. """
        def chunksIter (urls):
            for u in urls:
                yield (u, self.settings.toDict (), list (map (attrgetter ('name'), self.behavior)))
        # Split evenly across the configured concurrency; fall back to a
        # single chunk when there are fewer URLs than workers.
        itemsPerTask = len (urls)//self.concurrency
        if itemsPerTask <= 0:
            itemsPerTask = len (urls)
        return chain.from_iterable (archive.chunks (chunksIter (urls), itemsPerTask).apply_async ().get ())

@app.task(bind=True, track_started=True)
def controller (self, url, settings, enabledBehaviorNames, recursive, concurrency):
    """ Recursive controller: crawl url according to the recursion policy,
    dispatching individual page archives as celery tasks """

    policy = parseRecursive (recursive, url)
    enabled = [b for b in behavior.available if b.name in enabledBehaviorNames]
    # NOTE(review): the positional None binds to *service* of
    # DistributedRecursiveController — presumably fine because fetch ()
    # delegates to celery and never drives a browser itself; confirm.
    crawler = DistributedRecursiveController (url, None, behavior=enabled,
            settings=ControllerSettings (**settings),
            recursionPolicy=policy, concurrency=concurrency)
    return crawler.run ()