import re
from pathlib import Path

import time

from lxml import etree

from rework import api
from rework.task import Task
from rework.testutils import workers, scrub
from rework_ui import taskstable


DATADIR = Path(__file__).parent / 'data'

# html editor

def normalize(htmlstr):
    """Return *htmlstr* re-serialized as pretty-printed HTML bytes.

    Round-tripping through the lxml html parser yields a canonical form,
    so two documents can be compared regardless of incidental whitespace.
    """
    parsed = etree.fromstring(htmlstr, parser=etree.HTMLParser())
    doctree = parsed.getroottree()
    return etree.tostring(doctree, pretty_print=True, method='html')


def edittag(tag, editor, anstr):
    """Run *editor* over every element with the given tag name(s) in *anstr*.

    *tag* is a single tag name or an iterable of names; the edited document
    is returned as pretty-printed HTML bytes.
    """
    tagnames = [tag] if isinstance(tag, str) else tag
    tree = etree.fromstring(anstr, parser=etree.HTMLParser())
    for name in tagnames:
        for elt in tree.xpath('//%s' % name):
            editor(elt)
    return etree.tostring(
        tree.getroottree(), pretty_print=True, method='html'
    )


# test tasks

@api.task
def good_job(task):
    # Minimal succeeding operation: stores a raw (unserialized) byte string
    # as the task output so tests can assert on the exact payload.
    task.save_output(b'Well done !', raw=True)


@api.task
def bad_job(task):
    # Always-failing operation: the raised exception lets tests check the
    # traceback capture and error-reporting paths.
    raise Exception('I am a little crasher.')


@api.task
def abortme(task):
    # Never terminates on its own: loops forever so tests can exercise the
    # abort / preemptive-kill code paths.
    while True:
        time.sleep(1)


# tests

def test_no_job(client):
    """Asking for the status of an unknown task yields a 404 with a message."""
    response = client.get('/job_status/babar')
    assert response.status_code == 404
    assert 'NO SUCH JOB' in response.text


def test_bad_request(engine, client):
    """Malformed scheduling requests must answer 400 with a helpful body."""
    # bad hostid: no operation is registered for that host
    res = client.put('/schedule-task/good_job?user={}&hostid={}'.format('Babar', 'fancyhost'),
                     upload_files=[('input_file', 'input.xml', b'the file', 'text/xml')]
    )
    assert res.status == '400 BAD REQUEST'
    assert b'No operation was found' in res.body

    # bad operation: 'fake_job' is not a registered operation
    res = client.put('/schedule-task/fake_job?user={}'.format('Babar'),
                     upload_files=[('input_file', 'input.xml', b'the file', 'text/xml')]
    )
    assert res.status == '400 BAD REQUEST'
    assert b'No operation was found' in res.body

    # missing input file: the operation exists but no file was uploaded
    res = client.put('/schedule-task/good_job?user={}'.format('Babar'))
    assert res.status == '400 BAD REQUEST'
    assert b'input file is mandatory' in res.body


83
84
def test_abort(engine, client):
    """Aborting a running task marks it aborted once the worker is killed."""
    with workers(engine) as mon:
        response = client.put('/schedule-task/abortme?user=Babar',
                              upload_files=[('input_file', 'input.xml', b'the file', 'text/xml')])
        taskid = int(response.body)
        task = Task.byid(engine, taskid)
        assert not task.aborted
        client.get(f'/abort-task/{taskid}')
        # force the monitor to act on the abort request right away
        mon.preemptive_kill()
        assert task.aborted


def test_relaunch(engine, client):
    """A finished task can be relaunched, yielding a fresh task that completes."""
    with workers(engine):
        response = client.put('/schedule-task/good_job?user=Babar',
                              upload_files=[('input_file', 'input.xml', b'the file', 'text/xml')])
        firsttid = int(response.body)
        first = Task.byid(engine, firsttid)
        first.join()
        response = client.put(f'/relaunch-task/{firsttid}')
        relaunched = Task.byid(engine, int(response.body))
        relaunched.join()


def test_task_life_cycle(engine, client, refresh):
    """Exercise scheduling, result retrieval, deletion and the services table.

    Three successful tasks and one failing task are scheduled through the
    http api; their outputs are checked, all of them are deleted, then the
    services table is compared against a reference snapshot (regenerated
    when the *refresh* fixture is true).
    """
    with workers(engine):
        tasks = []
        for user in ('Babar', 'Babar', 'Celeste'):
            res = client.put('/schedule-task/good_job?user={}'.format(user),
                             upload_files=[('input_file', 'input.xml', b'the file', 'text/xml')]
            )
            tid = int(res.body)
            t1 = Task.byid(engine, tid)
            t1.join()

            assert t1.raw_output == b'Well done !'

            # raw outputs are served as a zip download
            res = client.get('/job_results/{}'.format(t1.tid))
            assert res.headers['Content-Type'] == 'application/zip'
            assert res.body == b'Well done !'

            tasks.append(t1)

        # a crashing task: its "result" is the traceback, served as text
        res = client.put('/schedule-task/bad_job?user=Celeste',
                         upload_files=[('input_file', 'input.xml', b'the file', 'text/xml')]
        )
        tid = int(res.body)

        t2 = Task.byid(engine, tid)
        t2.join()

        tasks.append(t2)

        res = client.get('/job_results/{}'.format(t2.tid))
        assert res.headers['Content-Type'] == 'text/plain; charset=utf-8'
        assert res.body.startswith(b'Traceback')
        assert 'I am a little crasher.' in res.text

        # delete every task; each deletion must answer 'true'
        results = [
            client.get('/delete-task/{}'.format(t.tid))
            for t in tasks
        ]
        assert all(deleted.body == b'true' for deleted in results)

        res = client.get('/services-table')

        # fix: raw string with escaped dots -- the previous pattern
        # '^\\d{1,3}.\\d{1,3}...' let '.' match any character (false
        # positives) and used non-raw backslash escapes (a warning on
        # modern pythons)
        ipaddr = re.compile(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$')

        def edit(elt):
            # scrub host-specific bits (file paths, ip addresses) so the
            # html can be compared against a stable reference file
            if elt.text:
                if 'test_rui' in elt.text:
                    elt.text = Path(elt.text).name
                elif ipaddr.match(elt.text):
                    elt.text = 'HOSTNAME'
            if 'value' in elt.attrib and ipaddr.match(elt.attrib['value']):
                elt.attrib['value'] = 'HOSTNAME'
            return elt

        html = edittag(('td', 'input'), edit, res.text)
        refpath = DATADIR / 'services.html'
        if refresh:
            # --refresh mode: regenerate the reference snapshot
            refpath.write_bytes(html)
        assert html == refpath.read_bytes()


def test_monitors_table(engine, client, refresh):
    """The workers table matches reference snapshots, before and after a task runs."""
    with engine.begin() as cn:
        cn.execute('delete from rework.monitor')
        cn.execute('delete from rework.worker')

    def check_against(refname):
        # fetch the workers table, canonicalize it and compare it with
        # (or, in --refresh mode, rewrite) the stored reference file
        page = client.get('/workers-table')
        html = normalize(scrub(page.text))
        refpath = DATADIR / refname
        if refresh:
            refpath.write_bytes(html)
        assert html == refpath.read_bytes()

    with workers(engine):
        check_against('monitors-table.html')

        task = api.schedule(engine, 'abortme')
        task.join('running')
        check_against('monitors-table-1-task.html')


def test_tasks_table(engine, client, refresh):
    """Exercise the tasks table views, their content hashes and snapshots.

    Checks the empty-table placeholder, the rendered table after
    refreshes, per-domain hash endpoints, and the error rendering of a
    failed task, comparing html against reference files (regenerated
    when the *refresh* fixture is true).
    """
    with engine.begin() as cn:
        cn.execute('delete from rework.task')

    with workers(engine):
        # before the first refresh there is only a placeholder and no hash
        res = client.get('/tasks-table')
        assert res.text == '<p>Table under construction ...</p>'

        res = client.get('/tasks-table-hash')
        assert res.text == 'no-hash-yet'

        # after a refresh: an empty table with just the header row
        taskstable.refresh_tasks_file(engine)
        res = client.get('/tasks-table')
        assert res.text == (
            '<br>\n'
            '<table class="table table-sm table-bordered table-striped table-hover">\n'
            '<thead class="thead-inverse"><tr><th>#</th><th>service</th><th>domain</th>'
            '<th>queued</th><th>started</th><th>finished</th>'
            '<th>user</th><th>worker</th><th>status</th><th>action</th></tr></thead>\n</table>'
        )

        # the bare hash and the 'default' domain hash agree;
        # 'all' has not been materialized
        res = client.get('/tasks-table-hash')
        assert res.text == 'd751713988987e9331980363e24189ce'
        res = client.get('/tasks-table-hash?domain=all')
        assert res.text == 'no-hash-yet'
        res = client.get('/tasks-table-hash?domain=default')
        assert res.text == 'd751713988987e9331980363e24189ce'

        # run a successful task and snapshot the resulting table
        t = api.schedule(engine, 'good_job', metadata={'user': 'Babar'})
        t.join()
        taskstable.refresh_tasks_file(engine)

        res = client.get('/tasks-table')
        refpath = DATADIR / 'tasks-table.html'
        if refresh:
            refpath.write_bytes(scrub(res.text).encode('utf-8'))
        assert scrub(res.text) == refpath.read_bytes().decode('utf-8')

        count = engine.execute('select count(*) from rework.taskstable').scalar()
        assert count == 1 # only default domains, 'all' appears with many domains

        # run a failing task: its table row carries the traceback
        t = api.schedule(engine, 'bad_job', metadata={'user': 'Babar'})
        t.join()
        taskstable.refresh_tasks_file(engine)
        res = client.get('/tasks-table')

        srcpath = re.compile('File "(.*)"')
        def edit(elt):
            # scrub the machine-specific source path from traceback tooltips
            if 'title' in elt.attrib:
                elt.attrib['title'] = srcpath.sub('/path/to/src/file', elt.attrib['title'])
            return elt

        html = edittag('td', edit, res.text).decode('utf-8')
        refpath = DATADIR / 'tasks-table-error.html'
        if refresh:
            refpath.write_bytes(scrub(html).encode('utf-8'))

        assert scrub(html) == refpath.read_bytes().decode('utf-8')

    # declare a new domain (importing .tasks registers its operations)
    from . import tasks
    api.freeze_operations(engine)
    with workers(engine, domain='uranus'):
        t = api.schedule(engine, 'justdoit', domain='uranus', metadata={'user': 'Celeste'})
        t.join()
        taskstable.refresh_tasks_file(engine)
        res = client.get('/tasks-table-hash?domain=uranus')
        assert res.text == '05265be5adad9bb8b0ee50f837535cfa'
        res = client.get('/tasks-table?domain=uranus')
        refpath = DATADIR / 'tasks-table-uranus.html'
        if refresh:
            refpath.write_bytes(scrub(res.text).encode('utf-8'))
        assert scrub(res.text) == refpath.read_bytes().decode('utf-8')