
Commit 44c0462

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent 738a247 commit 44c0462

File tree: 2 files changed (+57 -36 lines)

deploy.py

Lines changed: 56 additions & 36 deletions
@@ -1,26 +1,23 @@
 #!/usr/bin/env python3
-import json
 import argparse
+import json
 import os
-from glob import glob
-from functools import partial
-import subprocess
-from urllib.request import urlopen, Request
-from urllib.parse import urlencode
-from urllib.error import HTTPError
-from copy import deepcopy
 import re
 import ssl
+import subprocess
+from copy import deepcopy
+from functools import partial
+from glob import glob
+from urllib.error import HTTPError
+from urllib.parse import urlencode
+from urllib.request import Request, urlopen
 
 # UID for the folder under which our dashboards will be setup
 DEFAULT_FOLDER_UID = '70E5EE84-1217-4021-A89E-1E3DE0566D93'
 
 
 def grafana_request(endpoint, token, path, data=None, no_tls_verify=False):
-    headers = {
-        'Authorization': f'Bearer {token}',
-        'Content-Type': 'application/json'
-    }
+    headers = {'Authorization': f'Bearer {token}', 'Content-Type': 'application/json'}
     method = 'GET' if data is None else 'POST'
     req = Request(f'{endpoint}/api{path}', headers=headers, method=method)
 
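The hunk cuts grafana_request off here. For context only, a minimal sketch of how the rest of the function plausibly proceeds, given the ssl and urlopen imports above; the body below is an assumption, not part of this commit:

    # Sketch only: assumed completion of grafana_request, not the project's actual code.
    import json
    import ssl
    from urllib.request import Request, urlopen

    def grafana_request_sketch(endpoint, token, path, data=None, no_tls_verify=False):
        headers = {'Authorization': f'Bearer {token}', 'Content-Type': 'application/json'}
        method = 'GET' if data is None else 'POST'
        req = Request(f'{endpoint}/api{path}', headers=headers, method=method)
        # An unverified SSL context is one common way to honour no_tls_verify.
        ctx = ssl._create_unverified_context() if no_tls_verify else None
        body = None if data is None else json.dumps(data).encode()
        with urlopen(req, body, context=ctx) as resp:
            return json.loads(resp.read().decode())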
@@ -44,28 +41,30 @@ def ensure_folder(name, uid, api):
     except HTTPError as e:
         if e.code == 404:
             # We got a 404 in
-            folder = {
-                'uid': uid,
-                'title': name
-            }
+            folder = {'uid': uid, 'title': name}
             return api('/folders', folder)
         else:
             raise
 
 
 def build_dashboard(dashboard_path, api, global_dash=False):
-
     datasources = api("/datasources")
     datasources_names = [ds["name"] for ds in datasources]
 
     # We pass the list of all datasources because the global dashboards
     # use this information to show info about all datasources in the same panel
-    return json.loads(subprocess.check_output(
-        [
-            "jsonnet", "-J", "vendor", dashboard_path,
-            "--tla-code", f"datasources={datasources_names}"
-        ]
-    ).decode())
+    return json.loads(
+        subprocess.check_output(
+            [
+                "jsonnet",
+                "-J",
+                "vendor",
+                dashboard_path,
+                "--tla-code",
+                f"datasources={datasources_names}",
+            ]
+        ).decode()
+    )
 
 
 def layout_dashboard(dashboard):
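The reformatted call builds the same jsonnet command as before, just listed one argument per line. For illustration, with a hypothetical dashboard path and datasource names it is equivalent to:

    # Hypothetical values for illustration only.
    dashboard_path = "dashboards/cluster.jsonnet"
    datasources_names = ["prometheus", "prometheus-staging"]
    cmd = [
        "jsonnet", "-J", "vendor", dashboard_path,
        "--tla-code", f"datasources={datasources_names}",
    ]
    # i.e. jsonnet -J vendor dashboards/cluster.jsonnet \
    #        --tla-code "datasources=['prometheus', 'prometheus-staging']"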
@@ -108,11 +107,7 @@ def deploy_dashboard(dashboard_path, folder_uid, api, global_dash=False):
     db = layout_dashboard(db)
     db = populate_template_variables(api, db)
 
-    data = {
-        'dashboard': db,
-        'folderId': folder_uid,
-        'overwrite': True
-    }
+    data = {'dashboard': db, 'folderId': folder_uid, 'overwrite': True}
     api('/dashboards/db', data)
 
 
@@ -125,7 +120,9 @@ def get_label_values(api, ds_id, template_query):
     in a dashboard
     """
     # re.DOTALL allows the query to be multi-line
-    match = re.match(r'label_values\((?P<query>.*),\s*(?P<label>.*)\)', template_query, re.DOTALL)
+    match = re.match(
+        r'label_values\((?P<query>.*),\s*(?P<label>.*)\)', template_query, re.DOTALL
+    )
     query = match.group('query')
     label = match.group('label')
     query = {'match[]': query}
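A worked example of the regex above (the template query string is hypothetical): it splits a Grafana label_values(...) template query into the inner query and the label name.

    import re

    m = re.match(
        r'label_values\((?P<query>.*),\s*(?P<label>.*)\)',
        'label_values(kube_pod_info, namespace)',  # hypothetical template query
        re.DOTALL,
    )
    assert m.group('query') == 'kube_pod_info'
    assert m.group('label') == 'namespace'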
@@ -151,7 +148,9 @@ def populate_template_variables(api, db):
     for var in db.get('templating', {}).get('list', []):
         datasources = api("/datasources")
         if var["type"] == "datasource":
-            var["options"] = [{"text": ds["name"], "value": ds["name"]} for ds in datasources]
+            var["options"] = [
+                {"text": ds["name"], "value": ds["name"]} for ds in datasources
+            ]
 
             # default selection: first datasource in list
             if datasources and not var.get("current"):
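For two hypothetical datasources, the re-wrapped comprehension still produces the same options list:

    datasources = [{"name": "prometheus"}, {"name": "prometheus-staging"}]  # hypothetical
    options = [{"text": ds["name"], "value": ds["name"]} for ds in datasources]
    # options == [{'text': 'prometheus', 'value': 'prometheus'},
    #             {'text': 'prometheus-staging', 'value': 'prometheus-staging'}]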
@@ -188,17 +187,38 @@ def populate_template_variables(api, db):
 def main():
     parser = argparse.ArgumentParser()
     parser.add_argument('grafana_url', help='Grafana endpoint to deploy dashboards to')
-    parser.add_argument('--dashboards-dir', default="dashboards", help='Directory of jsonnet dashboards to deploy')
-    parser.add_argument('--folder-name', default='JupyterHub Default Dashboards', help='Name of Folder to deploy to')
-    parser.add_argument('--folder-uid', default=DEFAULT_FOLDER_UID, help='UID of grafana folder to deploy to')
-    parser.add_argument('--no-tls-verify', action='store_true', default=False,
-                        help='Whether or not to skip TLS certificate validation')
+    parser.add_argument(
+        '--dashboards-dir',
+        default="dashboards",
+        help='Directory of jsonnet dashboards to deploy',
+    )
+    parser.add_argument(
+        '--folder-name',
+        default='JupyterHub Default Dashboards',
+        help='Name of Folder to deploy to',
+    )
+    parser.add_argument(
+        '--folder-uid',
+        default=DEFAULT_FOLDER_UID,
+        help='UID of grafana folder to deploy to',
+    )
+    parser.add_argument(
+        '--no-tls-verify',
+        action='store_true',
+        default=False,
+        help='Whether or not to skip TLS certificate validation',
+    )
 
     args = parser.parse_args()
 
     grafana_token = os.environ['GRAFANA_TOKEN']
 
-    api = partial(grafana_request, args.grafana_url, grafana_token, no_tls_verify=args.no_tls_verify)
+    api = partial(
+        grafana_request,
+        args.grafana_url,
+        grafana_token,
+        no_tls_verify=args.no_tls_verify,
+    )
     folder = ensure_folder(args.folder_name, args.folder_uid, api)
 
     for dashboard in glob(f'{args.dashboards_dir}/*.jsonnet'):
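A minimal usage sketch of the partial-application pattern above, with a hypothetical endpoint and token, and assuming deploy.py is importable; api(path) issues a GET and api(path, data) a POST:

    from functools import partial

    from deploy import grafana_request  # assumes deploy.py is on the import path

    api = partial(
        grafana_request,
        'https://grafana.example.org',  # hypothetical endpoint
        'not-a-real-token',             # hypothetical token
        no_tls_verify=False,
    )
    datasources = api('/datasources')                    # GET  /api/datasources
    api('/folders', {'uid': 'abc123', 'title': 'Demo'})  # POST /api/folders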

noxfile.py

Lines changed: 1 addition & 0 deletions
@@ -9,6 +9,7 @@
 - Install nox: pip install nox
 - Start a live reloading docs server: nox -s docs -- live
 """
+
 import nox
 
 nox.options.reuse_existing_virtualenvs = True
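The only change here is a blank line inserted after the module docstring. For context, the "nox -s docs -- live" workflow quoted in that docstring implies a docs session of roughly this shape; this is a hedged sketch, not part of this diff, and the requirements path and Sphinx commands are assumptions:

    import nox

    @nox.session
    def docs(session):
        session.install('-r', 'docs/requirements.txt')  # hypothetical path
        if 'live' in session.posargs:
            session.run('sphinx-autobuild', 'docs', 'docs/_build/html')
        else:
            session.run('sphinx-build', 'docs', 'docs/_build/html')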
