-
Notifications
You must be signed in to change notification settings - Fork 203
/
Copy pathhelpers.py
774 lines (627 loc) · 25.5 KB
/
helpers.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""Webcompat.com relies on many little functions for processing data.
We collect them in that module.
"""
from functools import update_wrapper
from functools import wraps
import hashlib
import json
import logging
import os
import math
import random
import re
import urllib.parse
from flask import abort
from flask import g
from flask import make_response
from flask import request
from flask import session
import requests
from ua_parser import user_agent_parser
from webcompat import api
from webcompat import app
from webcompat import github
API_URI = 'https://api.github.com/'
AUTH_HEADERS = {'Authorization': 'token {0}'.format(app.config['OAUTH_TOKEN']),
'User-Agent': 'webcompat/webcompat-bot'}
HOST_WHITELIST = ('webcompat.com', 'staging.webcompat.com',
'127.0.0.1', 'localhost')
FIXTURES_PATH = os.getcwd() + '/tests/fixtures'
STATIC_PATH = os.getcwd() + '/webcompat/static'
JSON_MIME = 'application/json'
REPO_URI = app.config['ISSUES_REPO_URI']
cache_dict = {}
log = app.logger
log.setLevel(logging.INFO)
@app.template_filter('bust_cache')
def bust_cache(file_path):
    """Jinja2 filter appending a cache-busting md5 param to a static path.

    Checksums are memoized in the module-level cache_dict so each file is
    hashed at most once per process.  The cache lives in memory only, so an
    app restart (i.e. a deploy, when files may change) clears it.
    """
    def get_checksum(path):
        if path not in cache_dict:
            cache_dict[path] = md5_checksum(path)
        return cache_dict[path]
    return '{0}?{1}'.format(file_path, get_checksum(STATIC_PATH + file_path))
@app.context_processor
def register_ab_active():
    """Expose the `ab_active` helper to all Jinja templates."""
    return {'ab_active': ab_active}
def md5_checksum(file_path):
    """Return the hex md5 digest of the file at `file_path`."""
    digest = hashlib.md5()
    with open(file_path, 'rb') as source:
        # Hash in 8k chunks so memory stays flat for large files.
        for chunk in iter(lambda: source.read(8192), b''):
            digest.update(chunk)
    return digest.hexdigest()
def get_str_value(val):
    """Spell a Python value the way JSON would (true/false/null), as str."""
    if val is True:
        return 'true'
    if val is False:
        return 'false'
    if val is None:
        return 'null'
    return val if isinstance(val, str) else str(val)
def get_user_info():
    """Store the GitHub username and avatar URL in the session.

    Skips the network round-trip when both values are already cached
    in the session.
    """
    if not (session.get('username') and session.get('avatar_url')):
        gh_user = github.get('user')
        session['username'] = gh_user.get('login')
        session['avatar_url'] = gh_user.get('avatar_url')
def get_version_string(dictionary):
    """Build a dotted version string from a parsed UA or OS dict.

    `major` is required (an empty string is returned without it);
    `minor` and `patch` are appended in order, stopping at the first
    missing part.
    """
    parts = []
    for key in ('major', 'minor', 'patch'):
        value = dictionary.get(key)
        if not value:
            break
        parts.append(value)
    return '.'.join(parts)
def get_name(dictionary):
    """Return the family name from a UA or OS dict, or "Unknown".

    Bizarre UA or OS strings are parsed by ua-parser like so:
    {'major': None, 'minor': None, 'family': 'Other', 'patch': None}
    so "Other" is reported as "Unknown".  A missing or None `family`
    is treated the same way instead of raising AttributeError on
    `.lower()`.
    """
    name = dictionary.get('family')
    if not name or name.lower() == 'other':
        return 'Unknown'
    return name
def get_browser(user_agent_string=None):
    """Return "Name [(Tablet) ]version" for a UA string, or "Unknown".

    Used to pre-populate the bug reporting form.
    """
    if not (user_agent_string and isinstance(user_agent_string, str)):
        return "Unknown"
    ua_dict = user_agent_parser.Parse(user_agent_string)
    ua = ua_dict.get('user_agent')
    name = get_name(ua)
    # An unknown browser needs no version details.
    if name == "Unknown":
        return "Unknown"
    # Tablet devices get an explicit marker in the form.
    if ua_dict.get('device').get('model') == 'Tablet':
        model = '(Tablet) '
    else:
        model = ''
    return '{0} {1}{2}'.format(name, model, get_version_string(ua)).rstrip()
def get_browser_name(user_agent_string=None):
    """Return just the lowercased browser family, without the version.

    get_browser yields something like 'Chrome Mobile 47.0'; this gives
    'chrome mobile'.  Unknown user agents are reported as "unknown".
    """
    if not (user_agent_string and isinstance(user_agent_string, str)):
        return "unknown"
    full_name = get_browser(user_agent_string)
    return full_name.rsplit(' ', 1)[0].lower()
def get_os(user_agent_string=None):
    """Return the operating system name and version for a UA string.

    Used to pre-populate the bug reporting form; unparseable input
    yields "Unknown".
    """
    if not (user_agent_string and isinstance(user_agent_string, str)):
        return "Unknown"
    os_dict = user_agent_parser.Parse(user_agent_string).get('os')
    name = get_name(os_dict)
    # An unknown OS needs no version details.
    if name == "Unknown":
        return "Unknown"
    return '{0} {1}'.format(name, get_version_string(os_dict)).rstrip()
def get_response_headers(response):
    """Build our response headers from a GitHub Response object.

    Proxies etag, cache-control and (rewritten/sanitized) Link headers
    from GitHub into our own responses.
    """
    headers = {
        'etag': response.headers.get('etag'),
        'cache-control': response.headers.get('cache-control'),
        'content-type': JSON_MIME,
    }
    link = response.headers.get('link')
    if link:
        headers['link'] = rewrite_and_sanitize_link(link)
    return headers
def get_request_headers(headers, mime_type=JSON_MIME):
    """Build the headers we forward to GitHub when acting as a client.

    Copies the client's If-None-Match etag (utf-8 encoded) and
    User-Agent, and sets Accept to the requested mime type.
    """
    client_headers = {'Accept': mime_type}
    if 'If-None-Match' in headers:
        client_headers['If-None-Match'] = (
            headers['If-None-Match'].encode('utf-8'))
    if 'User-Agent' in headers:
        client_headers['User-Agent'] = headers['User-Agent']
    return client_headers
def get_referer(request):
    """Return a validated Referer URI, or None.

    Only referrers from our own hosts are trusted directly; otherwise
    a manually stashed session 'referer' is popped and returned, and
    None when there is no referrer at all.
    """
    if not request.referrer:
        return None
    host = urllib.parse.urlparse(request.referrer).hostname
    if host in HOST_WHITELIST:
        return request.referrer
    return session.pop('referer', None)
def set_referer(request):
    """Manually stash the referer URI in the session.

    Only URIs whose host is in HOST_WHITELIST may be stored.
    """
    referrer = request.referrer
    if not referrer:
        return
    if urllib.parse.urlparse(referrer).hostname in HOST_WHITELIST:
        session['referer'] = referrer
def normalize_api_params(params):
    """Normalize GitHub Issues API params to Search API conventions.

    Issues API params | Search API converted values
    -------------------------|---------------------------------------
    state | into q as "state:open", "state:closed"
    creator | into q as "author:username"
    mentioned | into q as "mentions:username"
    direction | order
    """
    if 'direction' in params:
        params['order'] = params.pop('direction')
    # These params become search qualifiers appended to the "q" string.
    for issues_key, search_key in (('state', 'state'),
                                   ('creator', 'author'),
                                   ('mentioned', 'mentions')):
        if issues_key in params:
            params['q'] += ' {0}:{1}'.format(search_key,
                                             params.pop(issues_key))
    return params
def rewrite_links(link_header):
    """Rewrite Link header Github API endpoints to our own.

    <https://api.github.com/repositories/17839063/iss...&page=2>; rel="next",
    <https://api.github.com/repositories/17839063/iss...&page=4>; rel="last"
    is transformed into
    </api/issues?per_page=50&page=2>; rel="next",
    </api/issues?per_page=50&page=4>; rel="last" etc.
    """
    header_link_data = parse_link_header(link_header)
    for data in header_link_data:
        uri_tuple = urllib.parse.urlsplit(data['link'])
        path = uri_tuple.path
        query = uri_tuple.query
        if path.startswith('/repositories/'):
            # Drop the '/repositories/<id>/' prefix and keep the rest,
            # e.g. 'issues/398/comments'.  A prefix slice is used on
            # purpose: str.lstrip('/repositories/') would strip a
            # *character set*, not the literal prefix.
            path = path[len('/repositories/'):].split('/', 1)[1]
        elif path.startswith('/search/issues'):
            path = 'issues/search'
        api_path = '{}{}'.format('/api/', path)
        data['link'] = urllib.parse.urlunsplit(('', '', api_path, query, ''))
    return format_link_header(header_link_data)
def sanitize_link(link_header):
    """Strip any OAuth tokens from a GitHub Link header.

    See also rewrite_links.
    """
    cleaned = [dict(entry, link=remove_oauth(entry['link']))
               for entry in parse_link_header(link_header)]
    return format_link_header(cleaned)
def remove_oauth(uri):
    """Return `uri` with any access_token query parameter removed.

    GitHub returns OAuth tokens in some circumstances; we drop them so
    they never leak publicly — they are not needed in a Link header.
    """
    parsed = urllib.parse.urlparse(uri)
    kept = [param for param in parsed.query.split('&')
            if not param.startswith('access_token=')]
    cleaned = parsed._replace(query='&'.join(kept))
    return urllib.parse.urlunparse(cleaned)
def rewrite_and_sanitize_link(link_header):
    """Sanitize a Link header, then rewrite its URIs to our API."""
    sanitized = sanitize_link(link_header)
    return rewrite_links(sanitized)
def parse_link_header(link_header):
    """Return a list of {'link': uri, 'rel': value} dicts for a Link header.

    Tailored to GitHub's Link headers; it will break on the general
    case.  Do not reuse this for generic HTTP Link parsing — use
    something like https://pypi.python.org/pypi/LinkHeader/ instead.
    """
    header_link_data = []
    for link in link_header.split(','):
        # Each entry looks like `<uri>; rel="value"` (GitHub only).
        uri_part, rel_part = (piece.strip() for piece in link.split(';'))
        # rel values look like `rel="foo"`; keep only `foo`.
        rel_value = rel_part.split('=')[1][1:-1]
        # URIs look like `<http://…>`; keep only `http://…`.
        uri = uri_part[1:-1]
        header_link_data.append({'link': uri, 'rel': rel_value})
    return header_link_data
def format_link_header(link_header_data):
    """Serialize parsed link data back into a Link: header string."""
    formatted = []
    for data in link_header_data:
        formatted.append('<{link}>; rel="{rel}"'.format(**data))
    return ', '.join(formatted)
def get_comment_data(request_data):
    """Extract the comment body from request JSON, re-encoded for GitHub.

    Only the `body` property of the incoming JSON is kept.
    """
    body = json.loads(request_data)['body']
    return json.dumps({'body': body})
def get_fixture_headers(file_data):
    """Return the headers to serve alongside a JSON fixture file.

    A fixture item may carry a `_fixtureLinkHeader` key whose value is
    forwarded as the Link header; only the first such item is used.
    """
    headers = {'content-type': JSON_MIME}
    for item in json.loads(file_data):
        if '_fixtureLinkHeader' in item:
            headers['link'] = item['_fixtureLinkHeader']
            break
    return headers
def mockable_response(func):
    """Decorator serving fixture files instead of live API responses.

    Active only when app.config['TESTING'] is set: the view is bypassed
    and a JSON fixture matching the request path is served instead of
    making network requests.  Requests that carry query arguments map
    to a fixture name suffixed with the md5 of the full request path.
    See /api/endpoints.py for usage.
    """
    @wraps(func)
    def wrapped_func(*args, **kwargs):
        if not app.config['TESTING']:
            return func(*args, **kwargs)
        get_args = request.args.copy()
        full_path = request.full_path
        if get_args:
            # Only requests with arguments get a checksum fixture; the
            # md5 of the full request URI picks the right file.
            checksum = hashlib.md5(to_bytes(full_path)).hexdigest()
            file_path = FIXTURES_PATH + request.path + '.' + checksum
        else:
            file_path = FIXTURES_PATH + request.path
        if not file_path.endswith('.json'):
            file_path += '.json'
        if not os.path.exists(file_path):
            print('Fixture expected at: {fix}'.format(fix=file_path))
            print('by the http request: {req}'.format(req=request.url))
            return ('', 404)
        with open(file_path, 'r') as f:
            data = f.read()
        return (data, 200, get_fixture_headers(data))
    return wrapped_func
def extract_url(issue_body):
    """Extract the reported URL from a webcompat.com issue body.

    Issue bodies contain a line of the form:
    **URL**: https://example.com/foobar
    Returns '' when no URL line is present; bare domains get an
    http:// prefix.
    """
    match = re.search(r'\*\*URL\*\*\: (.+)', issue_body)
    if not match:
        return ""
    url = match.group(1).strip()
    if not url.startswith(('http://', 'https://')):
        url = "http://%s" % url
    return url
def proxy_request(method, path, params=None, headers=None, data=None):
    """Make a GitHub API request with a bot's OAuth token.

    Necessary for non-logged in users.
    * `method` is a requests method name ('get', 'post', ...).
    * `path` is appended to the end of API_URI; it must NOT start
      with a leading slash.
    * `params` / `data`: optional query parameters and POST body.
    * `headers`: optional HTTP headers merged over AUTH_HEADERS,
      e.g. to send a request's etag back to GitHub.
    """
    # Merge passed in headers with AUTH_HEADERS, and add the etag of the
    # request, if it exists, to be sent back to GitHub.
    auth_headers = AUTH_HEADERS.copy()
    if headers:
        auth_headers.update(headers)
    # Grab the correct Requests request method
    req = getattr(requests, method)
    # It's expected that path *won't* start with a leading /
    # https://api.github.com/repos/{0}
    resource_uri = API_URI + path
    return req(resource_uri, data=data, params=params, headers=auth_headers)
def api_request(method, path, params=None, data=None, mime_type=JSON_MIME):
    """Handle communication with the GitHub API.

    Works for both logged-in (user token) and anonymous (bot proxy)
    requests.  Returns a (content, status_code, response_headers)
    tuple, which Flask turns into a Response — so views can
    `return api_request('get', path, params=params)` directly.
    Aborts with 404 when GitHub reports the resource missing.
    """
    request_headers = get_request_headers(g.request_headers, mime_type)
    request_method = github.raw_request if g.user else proxy_request
    resource = request_method(method, path, headers=request_headers,
                              params=params, data=data)
    if resource.status_code == 404:
        abort(404)
    return (resource.content, resource.status_code,
            get_response_headers(resource))
def add_sec_headers(response):
    """Add security-related headers to the response.

    Meant for @app.after_request so every response gets them.  HSTS is
    only sent off-localhost; CORS is opened up only on localhost.
    """
    if not app.config['LOCALHOST']:
        response.headers['Strict-Transport-Security'] = 'max-age=31536000; includeSubDomains;'  # noqa
    else:
        response.headers['Access-Control-Allow-Origin'] = '*'
    response.headers['X-Content-Type-Options'] = 'nosniff'
    response.headers['X-XSS-Protection'] = '1; mode=block'
    response.headers['X-Frame-Options'] = 'DENY'
def get_img_src_policy():
    """Return the CSP img-src directive for the current environment.

    Localhost servers additionally allow webcompat.com-hosted images
    for development convenience.
    """
    if app.config['LOCALHOST']:
        return "img-src 'self' https://webcompat.com https://www.google-analytics.com https://*.githubusercontent.com data:; "  # noqa
    return "img-src 'self' https://www.google-analytics.com https://*.githubusercontent.com data:; "  # noqa
def add_csp(response):
    """Attach the Content-Security-Policy header to a response.

    Meant for @app.after_request so the header is set on all responses.
    The script-src directive carries the per-request nonce.
    """
    script_directive = "script-src 'self' https://www.google-analytics.com https://api.github.com 'nonce-{nonce}'; ".format(nonce=request.nonce)  # noqa
    csp_params = [
        "default-src 'self'; ",
        "object-src 'none'; ",
        "connect-src 'self' https://api.github.com; ",
        "font-src 'self' https://fonts.gstatic.com; ",
        get_img_src_policy(),
        "manifest-src 'self'; ",
        script_directive,
        "style-src 'self' 'unsafe-inline' https://fonts.googleapis.com; ",
        "base-uri 'self'; ",
        "frame-ancestors 'self'; ",
        "report-uri /csp-report"
    ]
    response.headers['Content-Security-Policy'] = ''.join(csp_params)
def cache_policy(private=True, uri_max_age=86400, must_revalidate=False):
    """HTTP caching decorator factory for view functions.

    Adds Cache-Control headers:
    * max-age (default: 1 day aka 86400s)
    * private (default) or public caching scope
    * optional must-revalidate (False by default)
    Also adds an ETag computed from the HTTP body and answers an
    If-None-Match request with a 304 Not Modified.
    """
    def set_policy(view):
        @wraps(view)
        def policy(*args, **kwargs):
            response = make_response(view(*args, **kwargs))
            # we choose if the resource is private or public for caching
            if private:
                response.cache_control.private = True
            else:
                response.cache_control.public = True
            # Instructs the client if it needs to revalidate
            if must_revalidate:
                response.cache_control.must_revalidate = True
            # Instructs how long the Cache should keep the resource
            response.cache_control.max_age = uri_max_age
            # Etag is based on the HTTP body
            response.add_etag(response.data)
            # to send a 304 Not Modified instead of a full HTTP response
            response.make_conditional(request)
            return response
        return update_wrapper(policy, view)
    return set_policy
def is_valid_issue_form(form):
    """Check that the issue form carries every required parameter.

    A legitimate form needs all of the must_parameters and a known
    submit_type value; a missing essential makes it a bad request.
    Failures are logged with enough context to investigate.
    """
    must_parameters = [
        'browser',
        'description',
        'os',
        'problem_category',
        'submit_type',
        'url',
        'username', ]
    form_submit_values = ['github-auth-report', 'github-proxy-report']
    parameters_check = set(must_parameters).issubset(list(form.keys()))
    values_check = (parameters_check and
                    form['submit_type'] in form_submit_values)
    valid_form = parameters_check and values_check
    if not valid_form:
        log.info('is_valid_issue_form: form[submit_type] => {0}'.format(
            form.get('submit_type') or 'empty submit_type value'))
        log.info('is_valid_issue_form: missing param(s)? => {0}'.format(
            set(must_parameters).difference(list(form.keys()))))
        log.info('is_valid_issue_form: experiment branch => {0}'.format(
            ab_active('exp') or 'Unknown branch'
        ))
        log.info('is_valid_issue_form: reporter ip => {0}'.format(
            request.remote_addr
        ))
    return valid_form
def is_blacklisted_domain(domain):
    """Check if the domain is part of an exclusion list."""
    # see https://github.com/webcompat/webcompat.com/issues/1141
    # see https://github.com/webcompat/webcompat.com/issues/1237
    # see https://github.com/webcompat/webcompat.com/issues/1627
    spamlist = frozenset((
        'www.qiangpiaoruanjian.cn',
        'mailmanager.cityweb.de',
        'coco.fr',
    ))
    return domain in spamlist
def form_type(form_request):
    """Classify a /issues/new form request.

    Returns:
    * 'prefill' for GET requests and JSON POST requests
    * 'create' for multipart/form-data POST requests
    * None for anything else, including a POST without a Content-Type
      (content_type can be None, which previously crashed on
      .startswith).
    """
    method = form_request.method
    content_type = form_request.content_type or ''
    if method == 'GET':
        return 'prefill'
    elif method == 'POST' and content_type == 'application/json':
        return 'prefill'
    elif method == 'POST' and content_type.startswith('multipart/form-data'):
        return 'create'
    else:
        return None
def prepare_form(form_request):
    """Extract all known information from the form request.

    This is called by /issues/new to prepare what the form needs
    before being posted on GitHub.
    For HTTP POST:
    The JSON content will override any existing URL parameters.
    The URL parameters will be kept if non-existent in the JSON.
    """
    # Read everything from the passed-in request object.  (The previous
    # version mixed it with the global flask `request` proxy — the same
    # object inside a request context, but using the parameter is
    # consistent and unit-testable.)
    form_data = {
        'user_agent': form_request.headers.get('User-Agent'),
        'src': form_request.args.get('src'),
        'extra_labels': form_request.args.getlist('label'),
        'url': form_request.args.get('url'),
    }
    # We rely here on the fact we receive the right POST
    # because we tested it with form_type(request)
    if form_request.method == 'POST':
        form_data.update(form_request.get_json())
    return form_data
def is_json_object(json_data):
    """Check whether decoded JSON data is an object (i.e. a dict)."""
    return isinstance(json_data, dict)
def to_bytes(bytes_or_str):
    """Return the argument as bytes, utf-8-encoding a str input.

    Non-str values pass through unchanged.
    """
    if isinstance(bytes_or_str, str):
        return bytes_or_str.encode('utf-8')
    return bytes_or_str
def to_str(bytes_or_str):
    """Return the argument as str, utf-8-decoding a bytes input.

    Non-bytes values pass through unchanged.
    """
    if isinstance(bytes_or_str, bytes):
        return bytes_or_str.decode('utf-8')
    return bytes_or_str
def ab_active(exp_id):
    """Return the active experiment variation for `exp_id`, or False.

    Exempt requests never participate in experiments.
    """
    if not ab_exempt():
        variation = g.current_experiments.get(exp_id)
        if variation:
            return variation
    return False
def ab_exempt():
    """Return True when the current user is exempt from AB experiments."""
    user = g.user
    return bool(user and user.user_id in app.config['AB_EXEMPT_USERS'])
def ab_current_experiments():
    """Return the experiments the current request participates in.

    DNT requests and exempt users are left out.  For each configured
    experiment an existing cookie wins; otherwise a variation is drawn
    at random according to the configured percentage ranges.
    """
    curr_exp = {}
    if ab_exempt() or request.headers.get('DNT') == '1':
        return curr_exp
    for exp_id in app.config['AB_EXPERIMENTS']:
        cookie_var = request.cookies.get(exp_id) or False
        if cookie_var:
            curr_exp[exp_id] = cookie_var
            continue
        # Draw a percentage in (0, 100] with two-decimal resolution and
        # match it against each variation's (start, end] range.
        selector = (math.floor(random.random() * 10000) + 1) / 100
        variations = app.config['AB_EXPERIMENTS'][exp_id]['variations']
        for var, (start, end) in variations.items():
            if start < selector <= end:
                curr_exp[exp_id] = var
    return curr_exp
def ab_init(response):
    """Initialize the experiment cookies in the current session.

    For each experiment the request participates in, set the variation
    cookie when it is not already present.  Exempt users get no
    cookies at all.
    """
    if ab_exempt():
        return response
    for exp_id, var in g.current_experiments.items():
        # Only set the cookie when the client does not have one yet.
        if not request.cookies.get(exp_id):
            max_age = app.config['AB_EXPERIMENTS'][exp_id]['max-age']
            response.set_cookie(exp_id, var, max_age=max_age)
    return response
def get_extra_labels(form):
    """Compute the extra_labels for a new issue.

    A session-stashed extra_labels value wins over the form value.
    When the form-v2 experiment cookie is active, its label is added
    on top (replacing a non-list value entirely).
    """
    extra_labels = session.pop('extra_labels', [])
    if not extra_labels:
        raw = form.get('extra_labels', '[]') or '[]'
        extra_labels = json.loads(raw)
    if ab_active('exp') == 'form-v2':
        if isinstance(extra_labels, list):
            extra_labels = extra_labels + ['form-v2-experiment']
        else:
            extra_labels = ['form-v2-experiment']
    return extra_labels
def get_data_from_request(request):
    """Pull the uploaded screenshot or console logs out of a request.

    Returns an (is_image, data) tuple:
    * (True, file)  for an uploaded image file with a filename
    * (True, str)   for an image sent in the form body
    * (False, str)  for console_logs form data
    * (False, None) when the request carries none of these
    """
    if 'image' in request.files and request.files['image'].filename:
        return True, request.files['image']
    elif 'image' in request.form:
        return True, request.form['image']
    elif 'console_logs' in request.form:
        return False, request.form['console_logs']
    else:
        return False, None