1
0
Fork 0

Merge pull request #4 from thouveng/fix-datasource

Fix datasource
This commit is contained in:
pasquier-s 2016-11-02 12:15:13 +01:00 committed by GitHub
commit 667880a0c8
31 changed files with 51113 additions and 29 deletions

View File

@ -5,31 +5,240 @@ Grafana
A beautiful, easy to use and feature rich Graphite dashboard replacement and graph editor.
Sample pillars
==============
Sample pillar installed from system package
Server deployments
------------------
Server installed from system package
.. code-block:: yaml
grafana:
server:
enabled: true
admin:
user: admin
password: passwd
database:
engine: sqlite
Server installed with PostgreSQL database
.. code-block:: yaml
grafana:
server:
enabled: true
admin:
user: admin
password: passwd
database:
engine: postgresql
host: localhost
port: 5432
data_source:
metrics1:
engine: graphite
host: metrics1.domain.com
ssl: true
name: grafana
user: grafana
password: passwd
Server installed with default StackLight JSON dashboards
.. code-block:: yaml
grafana:
server:
enabled: true
dashboards:
enabled: true
path: /var/lib/grafana/dashboards
Server with theme overrides
.. code-block:: yaml
grafana:
server:
enabled: true
theme:
light:
css_override:
source: http://path.to.theme
source_hash: sha256=xyz
build: xyz
dark:
css_override:
source: salt://path.to.theme
Collector setup
---------------
Used to aggregate dashboards from monitoring node.
.. code-block:: yaml
grafana:
collector:
enabled: true
Client setups
-------------
Client with token based auth
.. code-block:: yaml
grafana:
client:
enabled: true
server:
protocol: https
host: grafana.host
port: 3000
token: token
Client with base auth
.. code-block:: yaml
grafana:
client:
enabled: true
server:
protocol: https
host: grafana.host
port: 3000
user: admin
password: password
Client enforcing graphite data source
.. code-block:: yaml
grafana:
client:
enabled: true
datasource:
graphite:
type: graphite
host: mtr01.domain.com
protocol: https
port: 443
user: test
metrics2:
engine: elasticsearch
host: metrics2.domain.com
Client enforcing elasticsearch data source
.. code-block:: yaml
grafana:
client:
enabled: true
datasource:
elasticsearch:
type: elasticsearch
host: log01.domain.com
port: 80
user: test
index: grafana-dash
Client defined and enforced dashboard
.. code-block:: yaml
grafana:
client:
enabled: true
server:
host: grafana.host
port: 3000
token: token
dashboard:
system_metrics:
title: "Generic system metrics"
style: dark
editable: false
row:
top:
title: "First row"
Client enforced dashboards defined in salt-mine
.. code-block:: yaml
grafana:
client:
enabled: true
remote_data:
engine: salt_mine
server:
host: grafana.host
port: 3000
token: token
Usage
=====
There's a difference between the JSON dashboard representation and the models we use.
The lists used in JSON format [for rows, panels and target] were replaced by
dictionaries. This form of serialization allows better merging and overrides
of hierarchical data structures that dashboard models are.
The default format of Grafana dashboards with lists for rows, panels and targets.
.. code-block:: yaml
system_metrics:
title: graph
editable: true
hideControls: false
rows:
- title: Usage
height: 250px
panels:
- title: Panel Title
span: 6
editable: false
type: graph
targets:
- refId: A
target: "support_prd.cfg01_iot_tcpcloud_eu.cpu.0.idle"
datasource: graphite01
renderer: flot
showTitle: true
The modified version of Grafana dashboard format with dictionary declarations.
Please note that dictionary keys are only for logical separation and are not
displayed in generated dashboards.
.. code-block:: yaml
system_metrics:
system_metrics2:
title: graph
editable: true
hideControls: false
row:
usage:
title: Usage
height: 250px
panel:
usage-panel:
title: Panel Title
span: 6
editable: false
type: graph
target:
A:
refId: A
target: "support_prd.cfg01_iot_tcpcloud_eu.cpu.0.idle"
datasource: graphite01
renderer: flot
showTitle: true
Read more
=========

View File

@ -0,0 +1,571 @@
# -*- coding: utf-8 -*-
'''
Manage Grafana v3.0 Dashboards
.. versionadded:: 2016.3.0
.. code-block:: yaml
grafana:
grafana_timeout: 3
grafana_token: qwertyuiop
grafana_url: 'https://url.com'
.. code-block:: yaml
Ensure minimum dashboard is managed:
grafana_dashboard.present:
- name: insightful-dashboard
- base_dashboards_from_pillar:
- default_dashboard
- base_rows_from_pillar:
- default_row
- base_panels_from_pillar:
- default_panel
- dashboard:
rows:
- title: Usage
panels:
- targets:
- target: alias(constantLine(50), 'max')
title: Imaginary
type: graph
The behavior of this module is to create dashboards if they do not exist, to
add rows if they do not exist in existing dashboards, and to update rows if
they exist in dashboards. The module will not manage rows that are not defined,
allowing users to manage their own custom rows.
'''
# Import Python libs
from __future__ import absolute_import
import copy
import json
import requests
# Import Salt libs
import salt.ext.six as six
from salt.utils.dictdiffer import DictDiffer
def __virtual__():
    '''Only load if grafana v3.0 is configured.'''
    # NOTE(review): the docstring previously said "v2.0", but the check below
    # requires grafana_version == 3 (default 1 keeps the module disabled).
    return __salt__['config.get']('grafana_version', 1) == 3
# Pillar keys that supply project-wide defaults; they are always consulted
# first when building inherited dashboards/panels/rows (see present()).
_DEFAULT_DASHBOARD_PILLAR = 'grafana_dashboards:default'
_DEFAULT_PANEL_PILLAR = 'grafana_panels:default'
_DEFAULT_ROW_PILLAR = 'grafana_rows:default'
# Pillar key listing row titles that _ensure_pinned_rows() moves to the top.
_PINNED_ROWS_PILLAR = 'grafana_pinned_rows'
def present(name,
            base_dashboards_from_pillar=None,
            base_panels_from_pillar=None,
            base_rows_from_pillar=None,
            dashboard=None,
            profile='grafana'):
    '''
    Ensure the grafana dashboard exists and is managed.

    name
        Name of the grafana dashboard.

    base_dashboards_from_pillar
        A pillar key that contains a list of dashboards to inherit from

    base_panels_from_pillar
        A pillar key that contains a list of panels to inherit from

    base_rows_from_pillar
        A pillar key that contains a list of rows to inherit from

    dashboard
        A dict that defines a dashboard that should be managed.

    profile
        A pillar key or dict that contains grafana information
    '''
    ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
    base_dashboards_from_pillar = base_dashboards_from_pillar or []
    base_panels_from_pillar = base_panels_from_pillar or []
    base_rows_from_pillar = base_rows_from_pillar or []
    dashboard = dashboard or {}
    if isinstance(profile, six.string_types):
        # A string profile names a key in the minion config / pillar.
        profile = __salt__['config.option'](profile)

    # Add pillar keys for default configuration
    base_dashboards_from_pillar = ([_DEFAULT_DASHBOARD_PILLAR] +
                                   base_dashboards_from_pillar)
    base_panels_from_pillar = ([_DEFAULT_PANEL_PILLAR] +
                               base_panels_from_pillar)
    base_rows_from_pillar = [_DEFAULT_ROW_PILLAR] + base_rows_from_pillar

    # Build out all dashboard fields: merge pillar bases, then normalise
    # panel spans/ids/annotations in place.
    new_dashboard = _inherited_dashboard(
        dashboard, base_dashboards_from_pillar, ret)
    new_dashboard['title'] = name
    rows = new_dashboard.get('rows', [])
    for i, row in enumerate(rows):
        rows[i] = _inherited_row(row, base_rows_from_pillar, ret)
    for row in rows:
        panels = row.get('panels', [])
        for i, panel in enumerate(panels):
            panels[i] = _inherited_panel(panel, base_panels_from_pillar, ret)
    _auto_adjust_panel_spans(new_dashboard)
    _ensure_panel_ids(new_dashboard)
    _ensure_annotations(new_dashboard)

    # Create dashboard if it does not exist
    url = 'db/{0}'.format(name)
    old_dashboard = _get(url, profile)
    if not old_dashboard:
        if __opts__['test']:
            ret['result'] = None
            ret['comment'] = 'Dashboard {0} is set to be created.'.format(name)
            return ret

        response = _update(new_dashboard, profile)
        if response.get('status') == 'success':
            ret['comment'] = 'Dashboard {0} created.'.format(name)
            ret['changes']['new'] = 'Dashboard {0} created.'.format(name)
        else:
            ret['result'] = False
            ret['comment'] = ("Failed to create dashboard {0}, "
                              "response={1}").format(name, response)
        return ret

    # Add unmanaged rows to the dashboard. They appear at the top if they are
    # marked as pinned. They appear at the bottom otherwise.
    managed_row_titles = [row.get('title')
                          for row in new_dashboard.get('rows', [])]
    new_rows = new_dashboard.get('rows', [])
    for old_row in old_dashboard.get('rows', []):
        if old_row.get('title') not in managed_row_titles:
            new_rows.append(copy.deepcopy(old_row))
    _ensure_pinned_rows(new_dashboard)
    # Re-number ids after unmanaged rows were appended.
    _ensure_panel_ids(new_dashboard)

    # Update dashboard if it differs (volatile fields are removed by
    # _cleaned() before comparing).
    dashboard_diff = DictDiffer(_cleaned(new_dashboard),
                                _cleaned(old_dashboard))
    updated_needed = (dashboard_diff.changed() or
                      dashboard_diff.added() or
                      dashboard_diff.removed())
    if updated_needed:
        if __opts__['test']:
            ret['result'] = None
            ret['comment'] = ('Dashboard {0} is set to be updated, '
                              'changes={1}').format(
                                  name,
                                  json.dumps(
                                      _dashboard_diff(
                                          _cleaned(new_dashboard),
                                          _cleaned(old_dashboard)
                                      ),
                                      indent=4
                                  ))
            return ret

        response = _update(new_dashboard, profile)
        if response.get('status') == 'success':
            # Re-fetch so the recorded diff reflects what the server stored.
            updated_dashboard = _get(url, profile)
            dashboard_diff = DictDiffer(_cleaned(updated_dashboard),
                                        _cleaned(old_dashboard))
            ret['comment'] = 'Dashboard {0} updated.'.format(name)
            ret['changes'] = _dashboard_diff(_cleaned(new_dashboard),
                                             _cleaned(old_dashboard))
        else:
            ret['result'] = False
            ret['comment'] = ("Failed to update dashboard {0}, "
                              "response={1}").format(name, response)
        return ret
    ret['comment'] = 'Dashboard present'
    return ret
def absent(name, profile='grafana'):
    '''
    Ensure the named grafana dashboard is absent.

    name
        Name of the grafana dashboard.

    profile
        A pillar key or dict that contains grafana information
    '''
    ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
    if isinstance(profile, six.string_types):
        profile = __salt__['config.option'](profile)

    url = 'db/{0}'.format(name)
    # Nothing to do when the dashboard is already gone.
    if not _get(url, profile):
        ret['comment'] = 'Dashboard absent'
        return ret

    if __opts__['test']:
        ret['result'] = None
        ret['comment'] = 'Dashboard {0} is set to be deleted.'.format(name)
        return ret

    _delete(url, profile)
    deleted_msg = 'Dashboard {0} deleted.'.format(name)
    ret['comment'] = deleted_msg
    ret['changes']['new'] = deleted_msg
    return ret
# Fields stripped by _cleaned() before diffing: they are server-assigned or
# change on every save, so they would cause spurious update cycles.
_IGNORED_DASHBOARD_FIELDS = [
    'id',
    'originalTitle',
    'version',
]
_IGNORED_ROW_FIELDS = []
# Panel fields with volatile or presentation-only content.
_IGNORED_PANEL_FIELDS = [
    'grid',
    'mode',
    'tooltip',
]
_IGNORED_TARGET_FIELDS = [
    'textEditor',
]
def _cleaned(_dashboard):
    '''Return a copy without fields that can differ.

    Deep-copies the input, drops the ignored dashboard/row/panel/target
    fields, and strips falsey panel entries so that default-valued keys do
    not produce spurious diffs.  The input dashboard is never mutated.
    '''
    dashboard = copy.deepcopy(_dashboard)
    for ignored_dashboard_field in _IGNORED_DASHBOARD_FIELDS:
        dashboard.pop(ignored_dashboard_field, None)
    for row in dashboard.get('rows', []):
        for ignored_row_field in _IGNORED_ROW_FIELDS:
            row.pop(ignored_row_field, None)
        for i, panel in enumerate(row.get('panels', [])):
            for ignored_panel_field in _IGNORED_PANEL_FIELDS:
                panel.pop(ignored_panel_field, None)
            for target in panel.get('targets', []):
                for ignored_target_field in _IGNORED_TARGET_FIELDS:
                    target.pop(ignored_target_field, None)
            # Replace the panel with its falsey-stripped version in place.
            row['panels'][i] = _stripped(panel)
    return dashboard
def _inherited_dashboard(dashboard, base_dashboards_from_pillar, ret):
    '''Return a dashboard with properties from parents.

    Pillar-named base dashboards are merged in order with *dashboard*
    applied last (so its keys win); 'tags' are unioned across all layers.
    A missing pillar key (other than the built-in default) appends a
    warning to ``ret['warnings']``.
    '''
    base_dashboards = []
    for base_dashboard_from_pillar in base_dashboards_from_pillar:
        base_dashboard = __salt__['pillar.get'](base_dashboard_from_pillar)
        if base_dashboard:
            base_dashboards.append(base_dashboard)
        elif base_dashboard_from_pillar != _DEFAULT_DASHBOARD_PILLAR:
            ret.setdefault('warnings', [])
            warning_message = 'Cannot find dashboard pillar "{0}".'.format(
                base_dashboard_from_pillar)
            if warning_message not in ret['warnings']:
                ret['warnings'].append(warning_message)
    base_dashboards.append(dashboard)

    result_dashboard = {}
    tags = set()
    # The loop variable shadows the `dashboard` parameter; that is safe here
    # because the argument was already appended to base_dashboards above.
    for dashboard in base_dashboards:
        tags.update(dashboard.get('tags', []))
        result_dashboard.update(dashboard)
    result_dashboard['tags'] = list(tags)
    return result_dashboard
def _inherited_row(row, base_rows_from_pillar, ret):
    '''Return a row with properties from parents.

    Merges pillar-named base rows in order, with *row* applied last so its
    keys win.  Missing pillar keys (other than the built-in default) add a
    warning to ``ret['warnings']``.
    '''
    base_rows = []
    for base_row_from_pillar in base_rows_from_pillar:
        base_row = __salt__['pillar.get'](base_row_from_pillar)
        if base_row:
            base_rows.append(base_row)
        elif base_row_from_pillar != _DEFAULT_ROW_PILLAR:
            ret.setdefault('warnings', [])
            warning_message = 'Cannot find row pillar "{0}".'.format(
                base_row_from_pillar)
            if warning_message not in ret['warnings']:
                ret['warnings'].append(warning_message)
    base_rows.append(row)

    result_row = {}
    # The loop variable shadows the `row` parameter; safe because the
    # argument was already appended to base_rows above.
    for row in base_rows:
        result_row.update(row)
    return result_row
def _inherited_panel(panel, base_panels_from_pillar, ret):
    '''Return a panel with properties from parents.

    Merges the pillar-named base panels in order, applying *panel* last so
    its keys win.  A pillar key that resolves to nothing (other than the
    built-in default) adds a de-duplicated warning to ``ret['warnings']``.
    '''
    layers = []
    for pillar_key in base_panels_from_pillar:
        base_panel = __salt__['pillar.get'](pillar_key)
        if base_panel:
            layers.append(base_panel)
        elif pillar_key != _DEFAULT_PANEL_PILLAR:
            warnings = ret.setdefault('warnings', [])
            warning_message = 'Cannot find panel pillar "{0}".'.format(
                pillar_key)
            if warning_message not in warnings:
                warnings.append(warning_message)
    layers.append(panel)

    merged = {}
    for layer in layers:
        merged.update(layer)
    return merged
_FULL_LEVEL_SPAN = 12
_DEFAULT_PANEL_SPAN = 2.5
def _auto_adjust_panel_spans(dashboard):
'''Adjust panel spans to take up the available width.
For each group of panels that would be laid out on the same level, scale up
the unspecified panel spans to fill up the level.
'''
for row in dashboard.get('rows', []):
levels = []
current_level = []
levels.append(current_level)
for panel in row.get('panels', []):
current_level_span = sum(panel.get('span', _DEFAULT_PANEL_SPAN)
for panel in current_level)
span = panel.get('span', _DEFAULT_PANEL_SPAN)
if current_level_span + span > _FULL_LEVEL_SPAN:
current_level = [panel]
levels.append(current_level)
else:
current_level.append(panel)
for level in levels:
specified_panels = [panel for panel in level if 'span' in panel]
unspecified_panels = [panel for panel in level
if 'span' not in panel]
if not unspecified_panels:
continue
specified_span = sum(panel['span'] for panel in specified_panels)
available_span = _FULL_LEVEL_SPAN - specified_span
auto_span = float(available_span) / len(unspecified_panels)
for panel in unspecified_panels:
panel['span'] = auto_span
def _ensure_pinned_rows(dashboard):
    '''Pin rows to the top of the dashboard.

    Rows whose title (case-insensitively) appears in the pinned-rows pillar
    are moved to the front of ``dashboard['rows']``; relative order is
    otherwise preserved.  Mutates *dashboard* in place.

    Fixes two defects in the previous implementation: it deleted list items
    while iterating with enumerate (skipping the element after each match),
    and it rebound the local ``rows`` name instead of writing the reordered
    list back into the dashboard — so pinned rows were dropped entirely.
    '''
    pinned_row_titles = __salt__['pillar.get'](_PINNED_ROWS_PILLAR)
    if not pinned_row_titles:
        return
    pinned_row_titles_lower = [title.lower() for title in pinned_row_titles]

    pinned_rows = []
    other_rows = []
    for row in dashboard.get('rows', []):
        if row.get('title', '').lower() in pinned_row_titles_lower:
            pinned_rows.append(row)
        else:
            other_rows.append(row)
    dashboard['rows'] = pinned_rows + other_rows
def _ensure_panel_ids(dashboard):
'''Assign panels auto-incrementing IDs.'''
panel_id = 1
for row in dashboard.get('rows', []):
for panel in row.get('panels', []):
panel['id'] = panel_id
panel_id += 1
def _ensure_annotations(dashboard):
'''Explode annotation_tags into annotations.'''
if 'annotation_tags' not in dashboard:
return
tags = dashboard['annotation_tags']
annotations = {
'enable': True,
'list': [],
}
for tag in tags:
annotations['list'].append({
'datasource': "graphite",
'enable': False,
'iconColor': "#C0C6BE",
'iconSize': 13,
'lineColor': "rgba(255, 96, 96, 0.592157)",
'name': tag,
'showLine': True,
'tags': tag,
})
del dashboard['annotation_tags']
dashboard['annotations'] = annotations
def _get(url, profile):
    '''Get a specific dashboard.

    Returns the decoded ``dashboard`` payload, or ``None`` when the API
    reports "Not found" or the response carries no dashboard.
    '''
    request_url = "{0}/api/dashboards/{1}".format(profile.get('grafana_url'),
                                                  url)
    if profile.get('grafana_token', False):
        # Token-based (Bearer) authentication.
        response = requests.get(
            request_url,
            headers=_get_headers(profile),
            timeout=profile.get('grafana_timeout', 3),
        )
    else:
        # HTTP basic authentication.
        response = requests.get(
            request_url,
            auth=_get_auth(profile),
            timeout=profile.get('grafana_timeout', 3),
        )
    data = response.json()
    if data.get('message') == 'Not found':
        return None
    if 'dashboard' not in data:
        return None
    return data['dashboard']
def _delete(url, profile):
    '''Delete a specific dashboard.

    Returns the decoded JSON response from the Grafana API.
    '''
    request_url = "{0}/api/dashboards/{1}".format(profile.get('grafana_url'),
                                                  url)
    if profile.get('grafana_token', False):
        response = requests.delete(
            request_url,
            headers=_get_headers(profile),
            # Default of 3s for consistency with _get(); previously a
            # missing 'grafana_timeout' meant no timeout at all, which can
            # block the state run indefinitely.
            timeout=profile.get('grafana_timeout', 3),
        )
    else:
        response = requests.delete(
            request_url,
            auth=_get_auth(profile),
            timeout=profile.get('grafana_timeout', 3),
        )
    data = response.json()
    return data
def _update(dashboard, profile):
    '''Create or overwrite a dashboard via the Grafana API.

    Posts to ``/api/dashboards/db`` with ``overwrite: true`` and returns the
    decoded JSON response.
    '''
    payload = {
        'dashboard': dashboard,
        'overwrite': True
    }
    request_url = "{0}/api/dashboards/db".format(profile.get('grafana_url'))
    if profile.get('grafana_token', False):
        response = requests.post(
            request_url,
            headers=_get_headers(profile),
            json=payload,
            # Consistent with _get()/_delete(): honour the profile timeout
            # (previously this request had no timeout and could hang).
            timeout=profile.get('grafana_timeout', 3),
        )
    else:
        response = requests.post(
            request_url,
            auth=_get_auth(profile),
            json=payload,
            timeout=profile.get('grafana_timeout', 3),
        )
    return response.json()
def _get_headers(profile):
return {
'Accept': 'application/json',
'Authorization': 'Bearer {0}'.format(profile['grafana_token'])
}
def _get_auth(profile):
    '''Build an HTTP basic-auth object from the profile credentials.'''
    return requests.auth.HTTPBasicAuth(
        profile['grafana_user'],
        profile['grafana_password']
    )
def _dashboard_diff(_new_dashboard, _old_dashboard):
    '''Return a dictionary of changes between dashboards.

    The result has three sections -- 'dashboard', 'rows' and 'panels' --
    each reporting changed/added/removed keys (empty entries are stripped).
    Rows are matched by title, panels by id.  Inputs are deep-copied and
    never mutated.
    '''
    diff = {}

    # Dashboard diff
    new_dashboard = copy.deepcopy(_new_dashboard)
    old_dashboard = copy.deepcopy(_old_dashboard)
    dashboard_diff = DictDiffer(new_dashboard, old_dashboard)
    diff['dashboard'] = _stripped({
        'changed': list(dashboard_diff.changed()) or None,
        'added': list(dashboard_diff.added()) or None,
        'removed': list(dashboard_diff.removed()) or None,
    })

    # Row diff (rows are matched by title)
    new_rows = new_dashboard.get('rows', [])
    old_rows = old_dashboard.get('rows', [])
    new_rows_by_title = {}
    old_rows_by_title = {}
    for row in new_rows:
        if 'title' in row:
            new_rows_by_title[row['title']] = row
    for row in old_rows:
        if 'title' in row:
            old_rows_by_title[row['title']] = row
    rows_diff = DictDiffer(new_rows_by_title, old_rows_by_title)
    diff['rows'] = _stripped({
        'added': list(rows_diff.added()) or None,
        'removed': list(rows_diff.removed()) or None,
    })
    for changed_row_title in rows_diff.changed():
        old_row = old_rows_by_title[changed_row_title]
        new_row = new_rows_by_title[changed_row_title]
        row_diff = DictDiffer(new_row, old_row)
        diff['rows'].setdefault('changed', {})
        diff['rows']['changed'][changed_row_title] = _stripped({
            'changed': list(row_diff.changed()) or None,
            'added': list(row_diff.added()) or None,
            'removed': list(row_diff.removed()) or None,
        })

    # Panel diff (panels are matched by id)
    old_panels_by_id = {}
    new_panels_by_id = {}
    for row in old_dashboard.get('rows', []):
        for panel in row.get('panels', []):
            if 'id' in panel:
                old_panels_by_id[panel['id']] = panel
    for row in new_dashboard.get('rows', []):
        for panel in row.get('panels', []):
            if 'id' in panel:
                new_panels_by_id[panel['id']] = panel
    panels_diff = DictDiffer(new_panels_by_id, old_panels_by_id)
    diff['panels'] = _stripped({
        'added': list(panels_diff.added()) or None,
        'removed': list(panels_diff.removed()) or None,
    })
    for changed_panel_id in panels_diff.changed():
        old_panel = old_panels_by_id[changed_panel_id]
        new_panel = new_panels_by_id[changed_panel_id]
        # Distinct name: the original rebound `panels_diff` inside the loop
        # that iterates panels_diff.changed(); use `panel_diff` to mirror
        # `row_diff` above and avoid the shadowing hazard.
        panel_diff = DictDiffer(new_panel, old_panel)
        diff['panels'].setdefault('changed', {})
        diff['panels']['changed'][changed_panel_id] = _stripped({
            'changed': list(panel_diff.changed()) or None,
            'added': list(panel_diff.added()) or None,
            'removed': list(panel_diff.removed()) or None,
        })
    return diff
def _stripped(d):
    '''Return a copy of *d* with all falsey-valued entries removed.'''
    return {key: value for key, value in d.items() if value}

View File

@ -0,0 +1,271 @@
# -*- coding: utf-8 -*-
'''
Manage Grafana v3.0 data sources
.. versionadded:: 2016.3.0
Token auth setup
.. code-block:: yaml
grafana:
grafana_version: 3
grafana_timeout: 5
grafana_token: qwertyuiop
grafana_url: 'https://url.com'
Basic auth setup
.. code-block:: yaml
grafana:
grafana_version: 3
grafana_timeout: 5
grafana_user: grafana
grafana_password: qwertyuiop
grafana_url: 'https://url.com'
.. code-block:: yaml
Ensure influxdb data source is present:
grafana_datasource.present:
- name: influxdb
- type: influxdb
- url: http://localhost:8086
- access: proxy
- basic_auth: true
- basic_auth_user: myuser
- basic_auth_password: mypass
- is_default: true
'''
from __future__ import absolute_import
import requests
from salt.ext.six import string_types
def __virtual__():
    '''Only load if grafana v3.0 is configured.'''
    # The state is exposed only when the minion config pins
    # grafana_version to 3 (default 1 keeps it disabled).
    return __salt__['config.get']('grafana_version', 1) == 3
def present(name,
            type,
            url,
            access='proxy',
            user='',
            password='',
            database='',
            basic_auth=False,
            basic_auth_user='',
            basic_auth_password='',
            is_default=False,
            type_logo_url='public/app/plugins/datasource/graphite/img/graphite_logo.png',
            with_credentials=False,
            json_data=None,
            profile='grafana'):
    '''
    Ensure that a data source is present.

    name
        Name of the data source.

    type
        Which type of data source it is ('graphite', 'influxdb' etc.).

    url
        The URL to the data source API.

    user
        Optional - user to authenticate with the data source

    password
        Optional - password to authenticate with the data source

    basic_auth
        Optional - set to True to use HTTP basic auth to authenticate with the
        data source.

    basic_auth_user
        Optional - HTTP basic auth username.

    basic_auth_password
        Optional - HTTP basic auth password.

    is_default
        Default: False

    profile
        A pillar key or dict that contains grafana information
    '''
    if isinstance(profile, string_types):
        profile = __salt__['config.option'](profile)

    ret = {'name': name, 'result': None, 'comment': None, 'changes': None}
    datasource = _get_datasource(profile, name)
    # BUG FIX: the previous call passed 12 positional arguments, so
    # `json_data` landed in the `type_logo_url` slot and the real
    # json_data/with_credentials values were silently dropped.
    data = _get_json_data(name, type, url, access, user, password, database,
                          basic_auth, basic_auth_user, basic_auth_password,
                          is_default, type_logo_url, with_credentials,
                          json_data)

    if datasource:
        # Data source already exists: update it in place.
        if profile.get('grafana_token', False):
            requests.put(
                _get_url(profile, datasource['id']),
                data,
                headers=_get_headers(profile),
                timeout=profile.get('grafana_timeout', 3),
            )
        else:
            requests.put(
                _get_url(profile, datasource['id']),
                data,
                auth=_get_auth(profile),
                timeout=profile.get('grafana_timeout', 3),
            )
        ret['result'] = True
        ret['changes'] = _diff(datasource, data)
        if ret['changes']['new'] or ret['changes']['old']:
            ret['comment'] = 'Data source {0} updated'.format(name)
        else:
            ret['changes'] = None
            ret['comment'] = 'Data source {0} already up-to-date'.format(name)
    else:
        if profile.get('grafana_token', False):
            requests.post(
                '{0}/api/datasources'.format(profile['grafana_url']),
                data,
                headers=_get_headers(profile),
                timeout=profile.get('grafana_timeout', 3),
            )
        else:
            # BUG FIX: creation must POST to the collection endpoint; the
            # basic-auth branch previously used PUT, unlike the token branch.
            requests.post(
                '{0}/api/datasources'.format(profile['grafana_url']),
                data,
                auth=_get_auth(profile),
                timeout=profile.get('grafana_timeout', 3),
            )
        # NOTE(review): the HTTP response status is not checked here, so a
        # failed create/update is still reported as success — confirm whether
        # that is acceptable before hardening.
        ret['result'] = True
        ret['comment'] = 'New data source {0} added'.format(name)
        ret['changes'] = data
    return ret
def absent(name, profile='grafana'):
    '''
    Ensure that a data source is absent.

    name
        Name of the data source to remove.

    profile
        A pillar key or dict that contains grafana information
    '''
    if isinstance(profile, string_types):
        profile = __salt__['config.option'](profile)

    # Include 'name' so the state return is well-formed for the highstate
    # outputter (the previous version omitted it, unlike present()).
    ret = {'name': name, 'result': None, 'comment': None, 'changes': None}
    datasource = _get_datasource(profile, name)

    if not datasource:
        ret['result'] = True
        ret['comment'] = 'Data source {0} already absent'.format(name)
        return ret

    if profile.get('grafana_token', False):
        requests.delete(
            _get_url(profile, datasource['id']),
            headers=_get_headers(profile),
            timeout=profile.get('grafana_timeout', 3),
        )
    else:
        requests.delete(
            _get_url(profile, datasource['id']),
            auth=_get_auth(profile),
            timeout=profile.get('grafana_timeout', 3),
        )

    ret['result'] = True
    ret['comment'] = 'Data source {0} was deleted'.format(name)
    return ret
def _get_url(profile, datasource_id):
return '{0}/api/datasources/{1}'.format(
profile['grafana_url'],
datasource_id
)
def _get_datasource(profile, name):
    '''Return the data source dict whose name equals *name*, else ``None``.

    Fetches the full data source list from the Grafana API, using Bearer
    token auth when the profile has one, HTTP basic auth otherwise.
    '''
    if profile.get('grafana_token', False):
        response = requests.get(
            '{0}/api/datasources'.format(profile['grafana_url']),
            headers=_get_headers(profile),
            timeout=profile.get('grafana_timeout', 3),
        )
    else:
        response = requests.get(
            '{0}/api/datasources'.format(profile['grafana_url']),
            auth=_get_auth(profile),
            timeout=profile.get('grafana_timeout', 3),
        )
    data = response.json()
    for datasource in data:
        if datasource['name'] == name:
            return datasource
    return None
def _get_headers(profile):
return {
'Accept': 'application/json',
'Authorization': 'Bearer {0}'.format(profile['grafana_token'])
}
def _get_auth(profile):
    '''Build an HTTP basic-auth object from the profile credentials.'''
    return requests.auth.HTTPBasicAuth(
        profile['grafana_user'],
        profile['grafana_password']
    )
def _get_json_data(name,
type,
url,
access='proxy',
user='',
password='',
database='',
basic_auth=False,
basic_auth_user='',
basic_auth_password='',
is_default=False,
type_logo_url='public/app/plugins/datasource/graphite/img/graphite_logo.png',
with_credentials=False,
json_data=None):
return {
'name': name,
'type': type,
'url': url,
'access': access,
'user': user,
'password': password,
'database': database,
'basicAuth': basic_auth,
'basicAuthUser': basic_auth_user,
'basicAuthPassword': basic_auth_password,
'isDefault': is_default,
'typeLogoUrl': type_logo_url,
'withCredentials': with_credentials,
'jsonData': json_data,
}
def _diff(old, new):
old_keys = old.keys()
old = old.copy()
new = new.copy()
for key in old_keys:
if key == 'id' or key == 'orgId':
del old[key]
elif old[key] == new[key]:
del old[key]
del new[key]
return {'old': old, 'new': new}

82
grafana/client.sls Normal file
View File

@ -0,0 +1,82 @@
{%- from "grafana/map.jinja" import client with context %}
{%- if client.enabled %}
/etc/salt/minion.d/_grafana.conf:
file.managed:
- source: salt://grafana/files/_grafana.conf
- template: jinja
- user: root
- group: root
{%- for datasource_name, datasource in client.datasource.iteritems() %}
grafana_client_datasource_{{ datasource_name }}:
grafana3_datasource.present:
- name: {{ datasource_name }}
- type: {{ datasource.type }}
- url: http://{{ datasource.host }}:{{ datasource.get('port', 80) }}
{%- if datasource.access is defined %}
- access: proxy
{%- endif %}
{%- if datasource.user is defined %}
- basic_auth: true
- basic_auth_user: {{ datasource.user }}
- basic_auth_password: {{ datasource.password }}
{%- endif %}
{%- endfor %}
{%- set raw_dict = {} %}
{%- set final_dict = {} %}
{%- if client.remote_data.engine == 'salt_mine' %}
{%- for node_name, node_grains in salt['mine.get']('*', 'grains.items').iteritems() %}
{%- if node_grains.grafana is defined %}
{%- set raw_dict = salt['grains.filter_by']({'default': raw_dict}, merge=node_grains.grafana.get('dashboard', {})) %}
{%- endif %}
{%- endfor %}
{%- endif %}
{%- if client.dashboard is defined %}
{%- set raw_dict = salt['grains.filter_by']({'default': raw_dict}, merge=client.dashboard) %}
{%- endif %}
{%- for dashboard_name, dashboard in raw_dict.iteritems() %}
{%- set rows = [] %}
{%- for row_name, row in dashboard.get('row', {}).iteritems() %}
{%- set panels = [] %}
{%- for panel_name, panel in row.get('panel', {}).iteritems() %}
{%- set targets = [] %}
{%- for target_name, target in panel.get('target', {}).iteritems() %}
{%- do targets.extend([target]) %}
{%- endfor %}
{%- do panel.update({'targets': targets}) %}
{%- do panels.extend([panel]) %}
{%- endfor %}
{%- do row.update({'panels': panels}) %}
{%- do rows.extend([row]) %}
{%- endfor %}
{%- do dashboard.update({'rows': rows}) %}
{%- do final_dict.update({dashboard_name: dashboard}) %}
{%- endfor %}
{%- for dashboard_name, dashboard in final_dict.iteritems() %}
{%- if dashboard.get('enabled', True) %}
grafana_client_dashboard_{{ dashboard_name }}:
grafana3_dashboard.present:
- name: {{ dashboard_name }}
- dashboard: {{ dashboard }}
{%- else %}
grafana_client_dashboard_{{ dashboard_name }}:
grafana3_dashboard.absent:
- name: {{ dashboard_name }}
{%- endif %}
{%- endfor %}
{%- endif %}

50
grafana/collector.sls Normal file
View File

@ -0,0 +1,50 @@
{%- from "grafana/map.jinja" import collector with context %}
{%- if collector.enabled %}
grafana_grains_dir:
file.directory:
- name: /etc/salt/grains.d
- mode: 700
- makedirs: true
- user: root
{%- set service_grains = {} %}
{# Loading the other service support metadata for localhost #}
{%- for service_name, service in pillar.iteritems() %}
{%- macro load_grains_file(grains_fragment_file) %}{% include grains_fragment_file ignore missing %}{% endmacro %}
{%- set grains_fragment_file = service_name+'/meta/grafana.yml' %}
{%- set grains_yaml = load_grains_file(grains_fragment_file)|load_yaml %}
{%- set service_grains = salt['grains.filter_by']({'default': service_grains}, merge=grains_yaml) %}
{%- endfor %}
grafana_grain:
file.managed:
- name: /etc/salt/grains.d/grafana
- source: salt://grafana/files/grafana.grain
- template: jinja
- user: root
- mode: 600
- defaults:
service_grains:
grafana: {{ service_grains|yaml }}
- require:
- file: grafana_grains_dir
grafana_grains_file:
cmd.wait:
- name: cat /etc/salt/grains.d/* > /etc/salt/grains
- watch:
- file: grafana_grain
grafana_grains_publish:
module.run:
- name: mine.update
- watch:
- cmd: grafana_grains_file
{%- endif %}

View File

@ -0,0 +1,13 @@
{%- from "grafana/map.jinja" import client with context %}
grafana_version: {{ client.server.get('version', 3) }}
grafana:
grafana_timeout: 3
{%- if client.server.token is defined %}
grafana_token: {{ client.server.token }}
{%- else %}
grafana_user: {{ client.server.user }}
grafana_password: {{ client.server.password }}
{%- endif %}
grafana_url: '{{ client.server.get('protocol', 'http') }}://{{ client.server.host }}:{{ client.server.get('port', 80) }}'

View File

@ -0,0 +1,898 @@
{
"annotations": {
"list": [
{
"datasource": "lma",
"enable": true,
"iconColor": "#C0C6BE",
"iconSize": 13,
"lineColor": "rgba(255, 96, 96, 0.592157)",
"name": "Status",
"query": "select title,tags,text from annotations where $timeFilter and cluster = 'apache'",
"showLine": true,
"tagsColumn": "tags",
"textColumn": "text",
"titleColumn": "title"
}
]
},
"editable": true,
"hideControls": false,
"id": null,
"links": [],
"originalTitle": "Apache",
"refresh": "1m",
"rows": [
{
"collapse": false,
"editable": true,
"height": "250px",
"panels": [
{
"cacheTimeout": null,
"colorBackground": true,
"colorValue": false,
"colors": [
"rgba(71, 212, 59, 0.4)",
"rgba(241, 181, 37, 0.73)",
"rgba(225, 40, 40, 0.59)"
],
"datasource": null,
"editable": true,
"error": false,
"format": "none",
"gauge": {
"maxValue": 100,
"minValue": 0,
"show": false,
"thresholdLabels": false,
"thresholdMarkers": true
},
"id": 11,
"interval": "> 60s",
"links": [],
"maxDataPoints": 100,
"nullPointMode": "connected",
"nullText": null,
"postfix": "",
"postfixFontSize": "50%",
"prefix": "",
"prefixFontSize": "50%",
"span": 3,
"sparkline": {
"fillColor": "rgba(31, 118, 189, 0.18)",
"full": false,
"lineColor": "rgb(31, 120, 193)",
"show": false
},
"targets": [
{
"column": "value",
"condition": "",
"dsType": "influxdb",
"fill": "",
"function": "last",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"groupByTags": [],
"groupby_field": "",
"interval": "",
"measurement": "cluster_status",
"policy": "default",
"query": "SELECT last(\"value\") FROM \"cluster_status\" WHERE \"cluster_name\" = 'apache' AND $timeFilter GROUP BY time($interval) fill(null)",
"rawQuery": false,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"value"
],
"type": "field"
},
{
"params": [],
"type": "last"
}
]
],
"tags": [
{
"key": "environment_label",
"operator": "=",
"value": "$environment"
},
{
"key": "cluster_name",
"operator": "=",
"value": "apache"
}
]
}
],
"thresholds": "1,3",
"title": "",
"type": "singlestat",
"valueFontSize": "80%",
"valueMaps": [
{
"op": "=",
"text": "no data",
"value": "null"
},
{
"op": "=",
"text": "OKAY",
"value": "0"
},
{
"op": "=",
"text": "WARN",
"value": "1"
},
{
"op": "=",
"text": "UNKN",
"value": "2"
},
{
"op": "=",
"text": "CRIT",
"value": "3"
},
{
"op": "=",
"text": "DOWN",
"value": "4"
}
],
"valueName": "current"
},
{
"aliasColors": {},
"bars": false,
"datasource": null,
"editable": true,
"error": false,
"fill": 1,
"grid": {
"threshold1": null,
"threshold1Color": "rgba(216, 200, 27, 0.27)",
"threshold2": null,
"threshold2Color": "rgba(234, 112, 112, 0.22)"
},
"id": 9,
"interval": "> 60s",
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": false,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "connected",
"percentage": false,
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"span": 6,
"stack": false,
"steppedLine": false,
"targets": [
{
"column": "value",
"dsType": "influxdb",
"function": "mean",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"0"
],
"type": "fill"
}
],
"groupByTags": [],
"measurement": "apache_requests",
"policy": "default",
"query": "SELECT mean(\"value\") FROM \"apache_requests\" WHERE \"hostname\" = '$server' AND $timeFilter GROUP BY time($interval) fill(0)",
"rawQuery": false,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"value"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": [
{
"key": "hostname",
"value": "$server"
}
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "Number of requests",
"tooltip": {
"msResolution": false,
"shared": false,
"value_type": "cumulative"
},
"type": "graph",
"xaxis": {
"show": true
},
"yaxes": [
{
"format": "short",
"label": "per second",
"logBase": 1,
"max": null,
"min": 0,
"show": true
},
{
"format": "short",
"logBase": 1,
"max": null,
"min": null,
"show": true
}
]
},
{
"aliasColors": {},
"bars": false,
"datasource": null,
"editable": true,
"error": false,
"fill": 1,
"grid": {
"threshold1": null,
"threshold1Color": "rgba(216, 200, 27, 0.27)",
"threshold2": null,
"threshold2Color": "rgba(234, 112, 112, 0.22)"
},
"id": 8,
"interval": "> 60s",
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": false,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "connected",
"percentage": false,
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"span": 9,
"stack": false,
"steppedLine": false,
"targets": [
{
"column": "value",
"dsType": "influxdb",
"function": "mean",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"0"
],
"type": "fill"
}
],
"groupByTags": [],
"measurement": "apache_bytes",
"policy": "default",
"query": "SELECT mean(\"value\") FROM \"apache_bytes\" WHERE \"hostname\" = '$server' AND $timeFilter GROUP BY time($interval) fill(0)",
"rawQuery": false,