
Merge pull request #4 from thouveng/fix-datasource

Fix datasource
Authored by pasquier-s on 2016-11-02 12:15:13 +01:00; committed by GitHub
commit 667880a0c8
31 changed files with 51113 additions and 29 deletions

@@ -5,31 +5,240 @@ Grafana
A beautiful, easy to use and feature rich Graphite dashboard replacement and graph editor.
Sample pillars
==============
Sample pillar installed from system package
Server deployments
------------------
Server installed from system package
.. code-block:: yaml
grafana:
server:
enabled: true
admin:
user: admin
password: passwd
database:
engine: sqlite
Server installed with PostgreSQL database
.. code-block:: yaml
grafana:
server:
enabled: true
admin:
user: admin
password: passwd
database:
engine: postgresql
host: localhost
port: 5432
data_source:
metrics1:
engine: graphite
host: metrics1.domain.com
ssl: true
name: grafana
user: grafana
password: passwd
Server installed with default StackLight JSON dashboards
.. code-block:: yaml
grafana:
server:
enabled: true
dashboards:
enabled: true
path: /var/lib/grafana/dashboards
Server with theme overrides
.. code-block:: yaml
grafana:
server:
enabled: true
theme:
light:
css_override:
source: http://path.to.theme
source_hash: sha256=xyz
build: xyz
dark:
css_override:
source: salt://path.to.theme
Collector setup
---------------
Used to aggregate dashboards from the monitoring node.
.. code-block:: yaml
grafana:
collector:
enabled: true
Client setups
-------------
Client with token based auth
.. code-block:: yaml
grafana:
client:
enabled: true
server:
protocol: https
host: grafana.host
port: 3000
token: token
Client with basic auth
.. code-block:: yaml
grafana:
client:
enabled: true
server:
protocol: https
host: grafana.host
port: 3000
user: admin
password: password
Client enforcing graphite data source
.. code-block:: yaml
grafana:
client:
enabled: true
datasource:
graphite:
type: graphite
host: mtr01.domain.com
protocol: https
port: 443
user: test
metrics2:
engine: elasticsearch
host: metrics2.domain.com
Client enforcing elasticsearch data source
.. code-block:: yaml
grafana:
client:
enabled: true
datasource:
elasticsearch:
type: elasticsearch
host: log01.domain.com
port: 80
user: test
index: grafana-dash
Client defined and enforced dashboard
.. code-block:: yaml
grafana:
client:
enabled: true
server:
host: grafana.host
port: 3000
token: token
dashboard:
system_metrics:
title: "Generic system metrics"
style: dark
editable: false
row:
top:
title: "First row"
Client enforced dashboards defined in salt-mine
.. code-block:: yaml
grafana:
client:
enabled: true
remote_data:
engine: salt_mine
server:
host: grafana.host
port: 3000
token: token
Usage
=====
There's a difference between the JSON dashboard representation and the models we use.
The lists used in the JSON format [for rows, panels and targets] were replaced by
dictionaries. This form of serialization allows better merging and overriding of the
hierarchical data structures that dashboard models are built from; a small merge sketch
follows the two format examples below.
The default format of Grafana dashboards uses lists for rows, panels and targets.
.. code-block:: yaml
system_metrics:
title: graph
editable: true
hideControls: false
rows:
- title: Usage
height: 250px
panels:
- title: Panel Title
span: 6
editable: false
type: graph
targets:
- refId: A
target: "support_prd.cfg01_iot_tcpcloud_eu.cpu.0.idle"
datasource: graphite01
renderer: flot
showTitle: true
The modified version of the Grafana dashboard format uses dictionary declarations.
Please note that the dictionary keys serve only for logical separation and are not
displayed in the generated dashboards.
.. code-block:: yaml
system_metrics:
system_metrics2:
title: graph
editable: true
hideControls: false
row:
usage:
title: Usage
height: 250px
panel:
usage-panel:
title: Panel Title
span: 6
editable: false
type: graph
target:
A:
refId: A
target: "support_prd.cfg01_iot_tcpcloud_eu.cpu.0.idle"
datasource: graphite01
renderer: flot
showTitle: true
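To see why the dictionary form merges well, consider two pillar (or salt-mine)
fragments that both contribute to the same dashboard. This is only a sketch; the
``usage`` and ``network`` row names are illustrative. Because rows are keyed by
name, the fragments combine key by key instead of one list replacing the other:

.. code-block:: yaml

    # Fragment from the first source
    system_metrics:
      row:
        usage:
          title: Usage
    ---
    # Fragment from the second source
    system_metrics:
      row:
        network:
          title: Network
    ---
    # Merged result: both rows survive
    system_metrics:
      row:
        usage:
          title: Usage
        network:
          title: Network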
Read more
=========

@@ -0,0 +1,571 @@
# -*- coding: utf-8 -*-
'''
Manage Grafana v3.0 Dashboards
.. versionadded:: 2016.3.0
.. code-block:: yaml
grafana:
grafana_timeout: 3
grafana_token: qwertyuiop
grafana_url: 'https://url.com'
.. code-block:: yaml
Ensure minimum dashboard is managed:
grafana_dashboard.present:
- name: insightful-dashboard
- base_dashboards_from_pillar:
- default_dashboard
- base_rows_from_pillar:
- default_row
- base_panels_from_pillar:
- default_panel
- dashboard:
rows:
- title: Usage
panels:
- targets:
- target: alias(constantLine(50), 'max')
title: Imaginary
type: graph
The behavior of this module is to create dashboards if they do not exist, to
add rows if they do not exist in existing dashboards, and to update rows if
they exist in dashboards. The module will not manage rows that are not defined,
allowing users to manage their own custom rows.
'''
# Import Python libs
from __future__ import absolute_import
import copy
import json
import requests
# Import Salt libs
import salt.ext.six as six
from salt.utils.dictdiffer import DictDiffer
def __virtual__():
    '''Only load if grafana v3.0 is configured.'''
return __salt__['config.get']('grafana_version', 1) == 3
_DEFAULT_DASHBOARD_PILLAR = 'grafana_dashboards:default'
_DEFAULT_PANEL_PILLAR = 'grafana_panels:default'
_DEFAULT_ROW_PILLAR = 'grafana_rows:default'
_PINNED_ROWS_PILLAR = 'grafana_pinned_rows'
def present(name,
base_dashboards_from_pillar=None,
base_panels_from_pillar=None,
base_rows_from_pillar=None,
dashboard=None,
profile='grafana'):
'''
Ensure the grafana dashboard exists and is managed.
name
Name of the grafana dashboard.
base_dashboards_from_pillar
A pillar key that contains a list of dashboards to inherit from
base_panels_from_pillar
A pillar key that contains a list of panels to inherit from
base_rows_from_pillar
A pillar key that contains a list of rows to inherit from
dashboard
A dict that defines a dashboard that should be managed.
profile
A pillar key or dict that contains grafana information
'''
ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
base_dashboards_from_pillar = base_dashboards_from_pillar or []
base_panels_from_pillar = base_panels_from_pillar or []
base_rows_from_pillar = base_rows_from_pillar or []
dashboard = dashboard or {}
if isinstance(profile, six.string_types):
profile = __salt__['config.option'](profile)
# Add pillar keys for default configuration
base_dashboards_from_pillar = ([_DEFAULT_DASHBOARD_PILLAR] +
base_dashboards_from_pillar)
base_panels_from_pillar = ([_DEFAULT_PANEL_PILLAR] +
base_panels_from_pillar)
base_rows_from_pillar = [_DEFAULT_ROW_PILLAR] + base_rows_from_pillar
# Build out all dashboard fields
new_dashboard = _inherited_dashboard(
dashboard, base_dashboards_from_pillar, ret)
new_dashboard['title'] = name
rows = new_dashboard.get('rows', [])
for i, row in enumerate(rows):
rows[i] = _inherited_row(row, base_rows_from_pillar, ret)
for row in rows:
panels = row.get('panels', [])
for i, panel in enumerate(panels):
panels[i] = _inherited_panel(panel, base_panels_from_pillar, ret)
_auto_adjust_panel_spans(new_dashboard)
_ensure_panel_ids(new_dashboard)
_ensure_annotations(new_dashboard)
# Create dashboard if it does not exist
url = 'db/{0}'.format(name)
old_dashboard = _get(url, profile)
if not old_dashboard:
if __opts__['test']:
ret['result'] = None
ret['comment'] = 'Dashboard {0} is set to be created.'.format(name)
return ret
response = _update(new_dashboard, profile)
if response.get('status') == 'success':
ret['comment'] = 'Dashboard {0} created.'.format(name)
ret['changes']['new'] = 'Dashboard {0} created.'.format(name)
else:
ret['result'] = False
ret['comment'] = ("Failed to create dashboard {0}, "
"response={1}").format(name, response)
return ret
# Add unmanaged rows to the dashboard. They appear at the top if they are
# marked as pinned. They appear at the bottom otherwise.
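    # The pinned titles come from the pillar named by _PINNED_ROWS_PILLAR
    # ('grafana_pinned_rows'); for example (hypothetical pillar value):
    # grafana_pinned_rows: ['overview'].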
managed_row_titles = [row.get('title')
for row in new_dashboard.get('rows', [])]
new_rows = new_dashboard.get('rows', [])
for old_row in old_dashboard.get('rows', []):
if old_row.get('title') not in managed_row_titles:
new_rows.append(copy.deepcopy(old_row))
_ensure_pinned_rows(new_dashboard)
_ensure_panel_ids(new_dashboard)
# Update dashboard if it differs
dashboard_diff = DictDiffer(_cleaned(new_dashboard),
_cleaned(old_dashboard))
updated_needed = (dashboard_diff.changed() or
dashboard_diff.added() or
dashboard_diff.removed())
if updated_needed:
if __opts__['test']:
ret['result'] = None
ret['comment'] = ('Dashboard {0} is set to be updated, '
'changes={1}').format(
name,
json.dumps(
_dashboard_diff(
_cleaned(new_dashboard),
_cleaned(old_dashboard)
),
indent=4
))
return ret
response = _update(new_dashboard, profile)
if response.get('status') == 'success':
updated_dashboard = _get(url, profile)
dashboard_diff = DictDiffer(_cleaned(updated_dashboard),
_cleaned(old_dashboard))
ret['comment'] = 'Dashboard {0} updated.'.format(name)
ret['changes'] = _dashboard_diff(_cleaned(new_dashboard),
_cleaned(old_dashboard))
else:
ret['result'] = False
ret['comment'] = ("Failed to update dashboard {0}, "
"response={1}").format(name, response)
return ret
ret['comment'] = 'Dashboard present'
return ret
def absent(name, profile='grafana'):
'''
Ensure the named grafana dashboard is absent.
name
Name of the grafana dashboard.
profile
A pillar key or dict that contains grafana information
'''
ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
if isinstance(profile, six.string_types):
profile = __salt__['config.option'](profile)
url = 'db/{0}'.format(name)
existing_dashboard = _get(url, profile)
if existing_dashboard:
if __opts__['test']:
ret['result'] = None
ret['comment'] = 'Dashboard {0} is set to be deleted.'.format(name)
return ret
_delete(url, profile)
ret['comment'] = 'Dashboard {0} deleted.'.format(name)
ret['changes']['new'] = 'Dashboard {0} deleted.'.format(name)
return ret
ret['comment'] = 'Dashboard absent'
return ret
_IGNORED_DASHBOARD_FIELDS = [
'id',
'originalTitle',
'version',
]
_IGNORED_ROW_FIELDS = []
_IGNORED_PANEL_FIELDS = [
'grid',
'mode',
'tooltip',
]
_IGNORED_TARGET_FIELDS = [
'textEditor',
]
def _cleaned(_dashboard):
'''Return a copy without fields that can differ.'''
dashboard = copy.deepcopy(_dashboard)
for ignored_dashboard_field in _IGNORED_DASHBOARD_FIELDS:
dashboard.pop(ignored_dashboard_field, None)
for row in dashboard.get('rows', []):
for ignored_row_field in _IGNORED_ROW_FIELDS:
row.pop(ignored_row_field, None)
for i, panel in enumerate(row.get('panels', [])):
for ignored_panel_field in _IGNORED_PANEL_FIELDS:
panel.pop(ignored_panel_field, None)
for target in panel.get('targets', []):
for ignored_target_field in _IGNORED_TARGET_FIELDS:
target.pop(ignored_target_field, None)
row['panels'][i] = _stripped(panel)
return dashboard
def _inherited_dashboard(dashboard, base_dashboards_from_pillar, ret):
'''Return a dashboard with properties from parents.'''
base_dashboards = []
for base_dashboard_from_pillar in base_dashboards_from_pillar:
base_dashboard = __salt__['pillar.get'](base_dashboard_from_pillar)
if base_dashboard:
base_dashboards.append(base_dashboard)
elif base_dashboard_from_pillar != _DEFAULT_DASHBOARD_PILLAR:
ret.setdefault('warnings', [])
warning_message = 'Cannot find dashboard pillar "{0}".'.format(
base_dashboard_from_pillar)
if warning_message not in ret['warnings']:
ret['warnings'].append(warning_message)
base_dashboards.append(dashboard)
result_dashboard = {}
tags = set()
for dashboard in base_dashboards:
tags.update(dashboard.get('tags', []))
result_dashboard.update(dashboard)
result_dashboard['tags'] = list(tags)
return result_dashboard
def _inherited_row(row, base_rows_from_pillar, ret):
'''Return a row with properties from parents.'''
base_rows = []
for base_row_from_pillar in base_rows_from_pillar:
base_row = __salt__['pillar.get'](base_row_from_pillar)
if base_row:
base_rows.append(base_row)
elif base_row_from_pillar != _DEFAULT_ROW_PILLAR:
ret.setdefault('warnings', [])
warning_message = 'Cannot find row pillar "{0}".'.format(
base_row_from_pillar)
if warning_message not in ret['warnings']:
ret['warnings'].append(warning_message)
base_rows.append(row)
result_row = {}
for row in base_rows:
result_row.update(row)
return result_row
def _inherited_panel(panel, base_panels_from_pillar, ret):
'''Return a panel with properties from parents.'''
base_panels = []
for base_panel_from_pillar in base_panels_from_pillar:
base_panel = __salt__['pillar.get'](base_panel_from_pillar)
if base_panel:
base_panels.append(base_panel)
elif base_panel_from_pillar != _DEFAULT_PANEL_PILLAR:
ret.setdefault('warnings', [])
warning_message = 'Cannot find panel pillar "{0}".'.format(
base_panel_from_pillar)
if warning_message not in ret['warnings']:
ret['warnings'].append(warning_message)
base_panels.append(panel)
result_panel = {}
for panel in base_panels:
result_panel.update(panel)
return result_panel
_FULL_LEVEL_SPAN = 12
_DEFAULT_PANEL_SPAN = 2.5
def _auto_adjust_panel_spans(dashboard):
'''Adjust panel spans to take up the available width.
For each group of panels that would be laid out on the same level, scale up
the unspecified panel spans to fill up the level.
'''
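    # Worked example (hypothetical numbers): a level holding panels with spans
    # [4, None, None] has 12 - 4 = 8 units of width left, so each of the two
    # unspecified panels is assigned a span of 8 / 2 = 4.0.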
for row in dashboard.get('rows', []):
levels = []
current_level = []
levels.append(current_level)
for panel in row.get('panels', []):
current_level_span = sum(panel.get('span', _DEFAULT_PANEL_SPAN)
for panel in current_level)
span = panel.get('span', _DEFAULT_PANEL_SPAN)
if current_level_span + span > _FULL_LEVEL_SPAN:
current_level = [panel]
levels.append(current_level)
else:
current_level.append(panel)
for level in levels:
specified_panels = [panel for panel in level if 'span' in panel]
unspecified_panels = [panel for panel in level
if 'span' not in panel]
if not unspecified_panels:
continue
specified_span = sum(panel['span'] for panel in specified_panels)
available_span = _FULL_LEVEL_SPAN - specified_span
auto_span = float(available_span) / len(unspecified_panels)
for panel in unspecified_panels:
panel['span'] = auto_span
def _ensure_pinned_rows(dashboard):
'''Pin rows to the top of the dashboard.'''
pinned_row_titles = __salt__['pillar.get'](_PINNED_ROWS_PILLAR)
if not pinned_row_titles:
return
pinned_row_titles_lower = []
for title in pinned_row_titles:
pinned_row_titles_lower.append(title.lower())
    rows = dashboard.get('rows', [])
    pinned_rows = []
    other_rows = []
    # Partition instead of deleting while iterating, and write the new order
    # back so the reordering actually takes effect.
    for row in rows:
        if row.get('title', '').lower() in pinned_row_titles_lower:
            pinned_rows.append(row)
        else:
            other_rows.append(row)
    dashboard['rows'] = pinned_rows + other_rows
def _ensure_panel_ids(dashboard):
'''Assign panels auto-incrementing IDs.'''
panel_id = 1
for row in dashboard.get('rows', []):
for panel in row.get('panels', []):
panel['id'] = panel_id
panel_id += 1
def _ensure_annotations(dashboard):
'''Explode annotation_tags into annotations.'''
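    # Example (hypothetical input): {'annotation_tags': ['deploy']} becomes an
    # 'annotations' dict whose list holds one entry named 'deploy' backed by
    # the "graphite" datasource.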
if 'annotation_tags' not in dashboard:
return
tags = dashboard['annotation_tags']
annotations = {
'enable': True,
'list': [],
}
for tag in tags:
annotations['list'].append({
'datasource': "graphite",
'enable': False,
'iconColor': "#C0C6BE",
'iconSize': 13,
'lineColor': "rgba(255, 96, 96, 0.592157)",
'name': tag,
'showLine': True,
'tags': tag,
})
del dashboard['annotation_tags']
dashboard['annotations'] = annotations
def _get(url, profile):
'''Get a specific dashboard.'''
request_url = "{0}/api/dashboards/{1}".format(profile.get('grafana_url'),
url)
if profile.get('grafana_token', False):
response = requests.get(
request_url,
headers=_get_headers(profile),
timeout=profile.get('grafana_timeout', 3),
)
else:
response = requests.get(
request_url,
auth=_get_auth(profile),
timeout=profile.get('grafana_timeout', 3),
)
data = response.json()
if data.get('message') == 'Not found':
return None
if 'dashboard' not in data:
return None
return data['dashboard']
def _delete(url, profile):
'''Delete a specific dashboard.'''
request_url = "{0}/api/dashboards/{1}".format(profile.get('grafana_url'),
url)
if profile.get('grafana_token', False):
response = requests.delete(
request_url,
headers=_get_headers(profile),
timeout=profile.get('grafana_timeout'),
)
else:
response = requests.delete(
request_url,
auth=_get_auth(profile),
timeout=profile.get('grafana_timeout'),
)
data = response.json()
return data
def _update(dashboard, profile):
'''Update a specific dashboard.'''
payload = {
'dashboard': dashboard,
'overwrite': True
}
request_url = "{0}/api/dashboards/db".format(profile.get('grafana_url'))
if profile.get('grafana_token', False):
response = requests.post(
request_url,
headers=_get_headers(profile),
json=payload
)
else:
response = requests.post(
request_url,
auth=_get_auth(profile),
json=payload
)
return response.json()
def _get_headers(profile):
return {
'Accept': 'application/json',
'Authorization': 'Bearer {0}'.format(profile['grafana_token'])
}
def _get_auth(profile):
return requests.auth.HTTPBasicAuth(
profile['grafana_user'],
profile['grafana_password']
)
def _dashboard_diff(_new_dashboard, _old_dashboard):
'''Return a dictionary of changes between dashboards.'''
diff = {}
# Dashboard diff
new_dashboard = copy.deepcopy(_new_dashboard)
old_dashboard = copy.deepcopy(_old_dashboard)
dashboard_diff = DictDiffer(new_dashboard, old_dashboard)
diff['dashboard'] = _stripped({
'changed': list(dashboard_diff.changed()) or None,
'added': list(dashboard_diff.added()) or None,
'removed': list(dashboard_diff.removed()) or None,
})
# Row diff
new_rows = new_dashboard.get('rows', [])
old_rows = old_dashboard.get('rows', [])
new_rows_by_title = {}
old_rows_by_title = {}
for row in new_rows:
if 'title' in row:
new_rows_by_title[row['title']] = row
for row in old_rows:
if 'title' in row:
old_rows_by_title[row['title']] = row
rows_diff = DictDiffer(new_rows_by_title, old_rows_by_title)
diff['rows'] = _stripped({
'added': list(rows_diff.added()) or None,
'removed': list(rows_diff.removed()) or None,
})
for changed_row_title in rows_diff.changed():
old_row = old_rows_by_title[changed_row_title]
new_row = new_rows_by_title[changed_row_title]
row_diff = DictDiffer(new_row, old_row)
diff['rows'].setdefault('changed', {})
diff['rows']['changed'][changed_row_title] = _stripped({
'changed': list(row_diff.changed()) or None,
'added': list(row_diff.added()) or None,
'removed': list(row_diff.removed()) or None,
})
# Panel diff
old_panels_by_id = {}
new_panels_by_id = {}
for row in old_dashboard.get('rows', []):
for panel in row.get('panels', []):
if 'id' in panel:
old_panels_by_id[panel['id']] = panel
for row in new_dashboard.get('rows', []):
for panel in row.get('panels', []):
if 'id' in panel:
new_panels_by_id[panel['id']] = panel
panels_diff = DictDiffer(new_panels_by_id, old_panels_by_id)
diff['panels'] = _stripped({
'added': list(panels_diff.added()) or None,
'removed': list(panels_diff.removed()) or None,
})
for changed_panel_id in panels_diff.changed():
old_panel = old_panels_by_id[changed_panel_id]
new_panel = new_panels_by_id[changed_panel_id]
panels_diff = DictDiffer(new_panel, old_panel)
diff['panels'].setdefault('changed', {})
diff['panels']['changed'][changed_panel_id] = _stripped({
'changed': list(panels_diff.changed()) or None,
'added': list(panels_diff.added()) or None,
'removed': list(panels_diff.removed()) or None,
})
return diff
def _stripped(d):
'''Strip falsey entries.'''
ret = {}
for k, v in six.iteritems(d):
if v:
ret[k] = v
return ret

@@ -0,0 +1,271 @@
# -*- coding: utf-8 -*-
'''
Manage Grafana v3.0 data sources
.. versionadded:: 2016.3.0
Token auth setup
.. code-block:: yaml
grafana:
grafana_version: 3
grafana_timeout: 5
grafana_token: qwertyuiop
grafana_url: 'https://url.com'
Basic auth setup
.. code-block:: yaml
grafana:
grafana_version: 3
grafana_timeout: 5
grafana_user: grafana
grafana_password: qwertyuiop
grafana_url: 'https://url.com'
.. code-block:: yaml
Ensure influxdb data source is present:
grafana_datasource.present:
- name: influxdb
- type: influxdb
- url: http://localhost:8086
- access: proxy
- basic_auth: true
- basic_auth_user: myuser
- basic_auth_password: mypass
- is_default: true
'''
from __future__ import absolute_import
import requests
from salt.ext.six import string_types
def __virtual__():
'''Only load if grafana v3.0 is configured.'''
return __salt__['config.get']('grafana_version', 1) == 3
def present(name,
type,
url,
access='proxy',
user='',
password='',
database='',
basic_auth=False,
basic_auth_user='',
basic_auth_password='',
is_default=False,
type_logo_url='public/app/plugins/datasource/graphite/img/graphite_logo.png',
with_credentials=False,
json_data=None,
profile='grafana'):
'''
Ensure that a data source is present.
name
Name of the data source.
type
Which type of data source it is ('graphite', 'influxdb' etc.).
url
The URL to the data source API.
user
Optional - user to authenticate with the data source
password
Optional - password to authenticate with the data source
basic_auth
Optional - set to True to use HTTP basic auth to authenticate with the
data source.
basic_auth_user
Optional - HTTP basic auth username.
basic_auth_password
Optional - HTTP basic auth password.
is_default
Default: False
'''
if isinstance(profile, string_types):
profile = __salt__['config.option'](profile)
ret = {'name': name, 'result': None, 'comment': None, 'changes': None}
datasource = _get_datasource(profile, name)
    data = _get_json_data(name, type, url, access, user, password, database,
                          basic_auth, basic_auth_user, basic_auth_password,
                          is_default, json_data=json_data)
if datasource:
if profile.get('grafana_token', False):
requests.put(
_get_url(profile, datasource['id']),
data,
headers=_get_headers(profile),
timeout=profile.get('grafana_timeout', 3),
)
else:
requests.put(
_get_url(profile, datasource['id']),
data,
auth=_get_auth(profile),
timeout=profile.get('grafana_timeout', 3),
)
ret['result'] = True
ret['changes'] = _diff(datasource, data)
if ret['changes']['new'] or ret['changes']['old']:
ret['comment'] = 'Data source {0} updated'.format(name)
else:
ret['changes'] = None
ret['comment'] = 'Data source {0} already up-to-date'.format(name)
else:
if profile.get('grafana_token', False):
requests.post(
'{0}/api/datasources'.format(profile['grafana_url']),
data,
headers=_get_headers(profile),
timeout=profile.get('grafana_timeout', 3),
)
else:
            requests.post(
'{0}/api/datasources'.format(profile['grafana_url']),
data,
auth=_get_auth(profile),
timeout=profile.get('grafana_timeout', 3),
)
ret['result'] = True
ret['comment'] = 'New data source {0} added'.format(name)
ret['changes'] = data
return ret
def absent(name, profile='grafana'):
'''
    Ensure that a data source is absent.
name
Name of the data source to remove.
'''
if isinstance(profile, string_types):
profile = __salt__['config.option'](profile)
ret = {'result': None, 'comment': None, 'changes': None}
datasource = _get_datasource(profile, name)
if not datasource:
ret['result'] = True
ret['comment'] = 'Data source {0} already absent'.format(name)
return ret
if profile.get('grafana_token', False):
requests.delete(
_get_url(profile, datasource['id']),
headers=_get_headers(profile),
timeout=profile.get('grafana_timeout', 3),
)
else:
requests.delete(
_get_url(profile, datasource['id']),
auth=_get_auth(profile),
timeout=profile.get('grafana_timeout', 3),
)
ret['result'] = True
ret['comment'] = 'Data source {0} was deleted'.format(name)
return ret
def _get_url(profile, datasource_id):
return '{0}/api/datasources/{1}'.format(
profile['grafana_url'],
datasource_id
)
def _get_datasource(profile, name):
if profile.get('grafana_token', False):
response = requests.get(
'{0}/api/datasources'.format(profile['grafana_url']),
headers=_get_headers(profile),
timeout=profile.get('grafana_timeout', 3),
)
else:
response = requests.get(
'{0}/api/datasources'.format(profile['grafana_url']),
auth=_get_auth(profile),
timeout=profile.get('grafana_timeout', 3),
)
data = response.json()
for datasource in data:
if datasource['name'] == name:
return datasource
return None
def _get_headers(profile):
return {
'Accept': 'application/json',
'Authorization': 'Bearer {0}'.format(profile['grafana_token'])
}
def _get_auth(profile):
return requests.auth.HTTPBasicAuth(
profile['grafana_user'],
profile['grafana_password']
)
def _get_json_data(name,
type,
url,
access='proxy',
user='',
password='',
database='',
basic_auth=False,
basic_auth_user='',
basic_auth_password='',
is_default=False,
type_logo_url='public/app/plugins/datasource/graphite/img/graphite_logo.png',
with_credentials=False,
json_data=None):
return {
'name': name,
'type': type,
'url': url,
'access': access,
'user': user,
'password': password,
'database': database,
'basicAuth': basic_auth,
'basicAuthUser': basic_auth_user,
'basicAuthPassword': basic_auth_password,
'isDefault': is_default,
'typeLogoUrl': type_logo_url,
'withCredentials': with_credentials,
'jsonData': json_data,
}
def _diff(old, new):
old_keys = old.keys()
old = old.copy()
new = new.copy()
for key in old_keys:
if key == 'id' or key == 'orgId':
del old[key]
elif old[key] == new[key]:
del old[key]
del new[key]
return {'old': old, 'new': new}

grafana/client.sls (new file)

@@ -0,0 +1,82 @@
{%- from "grafana/map.jinja" import client with context %}
{%- if client.enabled %}
/etc/salt/minion.d/_grafana.conf:
file.managed:
- source: salt://grafana/files/_grafana.conf
- template: jinja
- user: root
- group: root
{%- for datasource_name, datasource in client.datasource.iteritems() %}
grafana_client_datasource_{{ datasource_name }}:
grafana3_datasource.present:
- name: {{ datasource_name }}
- type: {{ datasource.type }}
- url: http://{{ datasource.host }}:{{ datasource.get('port', 80) }}
{%- if datasource.access is defined %}
- access: proxy
{%- endif %}
{%- if datasource.user is defined %}
- basic_auth: true
- basic_auth_user: {{ datasource.user }}
- basic_auth_password: {{ datasource.password }}
{%- endif %}
{%- endfor %}
{%- set raw_dict = {} %}
{%- set final_dict = {} %}
{%- if client.remote_data.engine == 'salt_mine' %}
{%- for node_name, node_grains in salt['mine.get']('*', 'grains.items').iteritems() %}
{%- if node_grains.grafana is defined %}
{%- set raw_dict = salt['grains.filter_by']({'default': raw_dict}, merge=node_grains.grafana.get('dashboard', {})) %}
{%- endif %}
{%- endfor %}
{%- endif %}
{%- if client.dashboard is defined %}
{%- set raw_dict = salt['grains.filter_by']({'default': raw_dict}, merge=client.dashboard) %}
{%- endif %}
{%- for dashboard_name, dashboard in raw_dict.iteritems() %}
{%- set rows = [] %}
{%- for row_name, row in dashboard.get('row', {}).iteritems() %}
{%- set panels = [] %}
{%- for panel_name, panel in row.get('panel', {}).iteritems() %}
{%- set targets = [] %}
{%- for target_name, target in panel.get('target', {}).iteritems() %}
{%- do targets.extend([target]) %}
{%- endfor %}
{%- do panel.update({'targets': targets}) %}
{%- do panels.extend([panel]) %}
{%- endfor %}
{%- do row.update({'panels': panels}) %}
{%- do rows.extend([row]) %}
{%- endfor %}
{%- do dashboard.update({'rows': rows}) %}
{%- do final_dict.update({dashboard_name: dashboard}) %}
{%- endfor %}
{%- for dashboard_name, dashboard in final_dict.iteritems() %}
{%- if dashboard.get('enabled', True) %}
grafana_client_dashboard_{{ dashboard_name }}:
grafana3_dashboard.present:
- name: {{ dashboard_name }}
- dashboard: {{ dashboard }}
{%- else %}
grafana_client_dashboard_{{ dashboard_name }}:
grafana3_dashboard.absent:
- name: {{ dashboard_name }}
{%- endif %}
{%- endfor %}
{%- endif %}

grafana/collector.sls (new file)

@@ -0,0 +1,50 @@
{%- from "grafana/map.jinja" import collector with context %}
{%- if collector.enabled %}
grafana_grains_dir:
file.directory:
- name: /etc/salt/grains.d
- mode: 700
- makedirs: true
- user: root
{%- set service_grains = {} %}
{# Loading the other service support metadata for localhost #}
{%- for service_name, service in pillar.iteritems() %}
{%- macro load_grains_file(grains_fragment_file) %}{% include grains_fragment_file ignore missing %}{% endmacro %}
{%- set grains_fragment_file = service_name+'/meta/grafana.yml' %}
{%- set grains_yaml = load_grains_file(grains_fragment_file)|load_yaml %}
{%- set service_grains = salt['grains.filter_by']({'default': service_grains}, merge=grains_yaml) %}
{%- endfor %}
grafana_grain:
file.managed:
- name: /etc/salt/grains.d/grafana
- source: salt://grafana/files/grafana.grain
- template: jinja
- user: root
- mode: 600
- defaults:
service_grains:
grafana: {{ service_grains|yaml }}
- require:
- file: grafana_grains_dir
grafana_grains_file:
cmd.wait:
- name: cat /etc/salt/grains.d/* > /etc/salt/grains
- watch:
- file: grafana_grain
grafana_grains_publish:
module.run:
- name: mine.update
- watch:
- cmd: grafana_grains_file
{%- endif %}

@@ -0,0 +1,13 @@
{%- from "grafana/map.jinja" import client with context %}
grafana_version: {{ client.server.get('version', 3) }}
grafana:
grafana_timeout: 3
{%- if client.server.token is defined %}
grafana_token: {{ client.server.token }}
{%- else %}
grafana_user: {{ client.server.user }}
grafana_password: {{ client.server.password }}
{%- endif %}
grafana_url: '{{ client.server.get('protocol', 'http') }}://{{ client.server.host }}:{{ client.server.get('port', 80) }}'

@@ -0,0 +1,898 @@
{
"annotations": {
"list": [
{
"datasource": "lma",
"enable": true,
"iconColor": "#C0C6BE",
"iconSize": 13,
"lineColor": "rgba(255, 96, 96, 0.592157)",
"name": "Status",
"query": "select title,tags,text from annotations where $timeFilter and cluster = 'apache'",
"showLine": true,
"tagsColumn": "tags",
"textColumn": "text",
"titleColumn": "title"
}
]
},
"editable": true,
"hideControls": false,
"id": null,
"links": [],
"originalTitle": "Apache",
"refresh": "1m",
"rows": [
{
"collapse": false,
"editable": true,
"height": "250px",
"panels": [
{
"cacheTimeout": null,
"colorBackground": true,
"colorValue": false,
"colors": [
"rgba(71, 212, 59, 0.4)",
"rgba(241, 181, 37, 0.73)",
"rgba(225, 40, 40, 0.59)"
],
"datasource": null,
"editable": true,
"error": false,
"format": "none",
"gauge": {
"maxValue": 100,
"minValue": 0,
"show": false,
"thresholdLabels": false,
"thresholdMarkers": true
},
"id": 11,
"interval": "> 60s",
"links": [],
"maxDataPoints": 100,
"nullPointMode": "connected",
"nullText": null,
"postfix": "",
"postfixFontSize": "50%",
"prefix": "",
"prefixFontSize": "50%",
"span": 3,
"sparkline": {
"fillColor": "rgba(31, 118, 189, 0.18)",
"full": false,
"lineColor": "rgb(31, 120, 193)",
"show": false
},
"targets": [
{
"column": "value",
"condition": "",
"dsType": "influxdb",
"fill": "",
"function": "last",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"groupByTags": [],
"groupby_field": "",
"interval": "",
"measurement": "cluster_status",
"policy": "default",
"query": "SELECT last(\"value\") FROM \"cluster_status\" WHERE \"cluster_name\" = 'apache' AND $timeFilter GROUP BY time($interval) fill(null)",
"rawQuery": false,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"value"
],
"type": "field"
},
{
"params": [],
"type": "last"
}
]
],
"tags": [
{
"key": "environment_label",
"operator": "=",
"value": "$environment"
},
{
"key": "cluster_name",
"operator": "=",
"value": "apache"
}
]
}
],
"thresholds": "1,3",
"title": "",
"type": "singlestat",
"valueFontSize": "80%",
"valueMaps": [
{
"op": "=",
"text": "no data",
"value": "null"
},
{
"op": "=",
"text": "OKAY",
"value": "0"
},
{
"op": "=",
"text": "WARN",
"value": "1"
},
{
"op": "=",
"text": "UNKN",
"value": "2"
},
{
"op": "=",
"text": "CRIT",
"value": "3"
},
{
"op": "=",
"text": "DOWN",
"value": "4"
}
],
"valueName": "current"
},
{
"aliasColors": {},
"bars": false,
"datasource": null,
"editable": true,
"error": false,
"fill": 1,
"grid": {
"threshold1": null,
"threshold1Color": "rgba(216, 200, 27, 0.27)",
"threshold2": null,
"threshold2Color": "rgba(234, 112, 112, 0.22)"
},
"id": 9,
"interval": "> 60s",
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": false,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "connected",
"percentage": false,
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"span": 6,
"stack": false,
"steppedLine": false,
"targets": [
{
"column": "value",
"dsType": "influxdb",
"function": "mean",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"0"
],
"type": "fill"
}
],
"groupByTags": [],
"measurement": "apache_requests",
"policy": "default",
"query": "SELECT mean(\"value\") FROM \"apache_requests\" WHERE \"hostname\" = '$server' AND $timeFilter GROUP BY time($interval) fill(0)",
"rawQuery": false,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"value"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": [
{
"key": "hostname",
"value": "$server"
}
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "Number of requests",
"tooltip": {
"msResolution": false,
"shared": false,
"value_type": "cumulative"
},
"type": "graph",
"xaxis": {
"show": true
},
"yaxes": [
{
"format": "short",
"label": "per second",
"logBase": 1,
"max": null,
"min": 0,
"show": true
},
{
"format": "short",
"logBase": 1,
"max": null,
"min": null,
"show": true
}
]
},
{
"aliasColors": {},
"bars": false,
"datasource": null,
"editable": true,
"error": false,
"fill": 1,
"grid": {
"threshold1": null,
"threshold1Color": "rgba(216, 200, 27, 0.27)",
"threshold2": null,
"threshold2Color": "rgba(234, 112, 112, 0.22)"
},
"id": 8,
"interval": "> 60s",
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": false,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "connected",
"percentage": false,
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"span": 9,
"stack": false,
"steppedLine": false,
"targets": [
{
"column": "value",
"dsType": "influxdb",
"function": "mean",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"0"
],
"type": "fill"
}
],
"groupByTags": [],
"measurement": "apache_bytes",
"policy": "default",
"query": "SELECT mean(\"value\") FROM \"apache_bytes\" WHERE \"hostname\" = '$server' AND $timeFilter GROUP BY time($interval) fill(0)",
"rawQuery": false,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"value"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": [
{
"key": "hostname",
"value": "$server"
}
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "Bytes/s transmitted",
"tooltip": {
"msResolution": false,
"shared": true,
"value_type": "cumulative"
},
"type": "graph",
"xaxis": {
"show": true
},
"yaxes": [
{
"format": "bytes",
"label": "Bytes/s",
"logBase": 1,
"max": null,
"min": 0,
"show": true
},
{
"format": "short",
"logBase": 1,
"max": null,
"min": null,
"show": true
}
]
},
{
"aliasColors": {},
"bars": false,
"datasource": null,
"editable": true,
"error": false,
"fill": 1,
"grid": {
"threshold1": null,
"threshold1Color": "rgba(216, 200, 27, 0.27)",
"threshold2": null,
"threshold2Color": "rgba(234, 112, 112, 0.22)"
},
"id": 10,
"interval": "> 60s",
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": false,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "connected",
"percentage": false,
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"span": 9,
"stack": false,
"steppedLine": false,
"targets": [
{
"column": "value",
"dsType": "influxdb",
"function": "mean",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"0"
],
"type": "fill"
}
],
"groupByTags": [],
"measurement": "apache_connections",
"policy": "default",
"query": "SELECT mean(\"value\") FROM \"apache_connections\" WHERE \"hostname\" = '$server' AND $timeFilter GROUP BY time($interval) fill(0)",
"rawQuery": false,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"value"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": [
{
"key": "hostname",
"value": "$server"
}
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "Number of connections",
"tooltip": {
"msResolution": false,
"shared": true,
"value_type": "cumulative"
},
"type": "graph",
"xaxis": {
"show": true
},
"yaxes": [
{
"format": "short",
"logBase": 1,
"max": null,
"min": 0,
"show": true
},
{
"format": "short",
"logBase": 1,
"max": null,
"min": null,
"show": true
}
]
},
{
"cacheTimeout": null,
"colorBackground": false,
"colorValue": false,
"colors": [
"rgba(245, 54, 54, 0.9)",
"rgba(237, 129, 40, 0.89)",
"rgba(50, 172, 45, 0.97)"
],
"datasource": null,
"editable": true,
"error": false,
"format": "none",
"gauge": {
"maxValue": 100,
"minValue": 0,
"show": false,
"thresholdLabels": false,
"thresholdMarkers": true
},
"id": 6,
"interval": "> 60s",
"links": [],
"maxDataPoints": 100,
"nullPointMode": "connected",
"nullText": null,
"postfix": "",
"postfixFontSize": "50%",
"prefix": "",
"prefixFontSize": "50%",
"span": 3,
"sparkline": {
"fillColor": "rgba(31, 118, 189, 0.18)",
"full": false,
"lineColor": "rgb(31, 120, 193)",
"show": true
},
"targets": [
{
"column": "value",
"dsType": "influxdb",
"function": "last",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"groupByTags": [],
"measurement": "apache_connections",
"policy": "default",
"query": "SELECT last(\"value\") FROM \"apache_connections\" WHERE \"hostname\" = '$server' AND $timeFilter GROUP BY time($interval) fill(null)",
"rawQuery": false,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"value"
],
"type": "field"
},
{
"params": [],
"type": "last"
}
]
],
"tags": [
{
"key": "hostname",
"value": "$server"
}
]
}
],
"thresholds": "",
"title": "Current connections",
"type": "singlestat",
"valueFontSize": "80%",
"valueMaps": [
{
"op": "=",
"text": "N/A",
"value": "null"
}
],
"valueName": "current"
},
{
"aliasColors": {},
"bars": false,
"datasource": null,
"decimals": null,
"editable": true,
"error": false,
"fill": 1,
"grid": {
"threshold1": null,
"threshold1Color": "rgba(216, 200, 27, 0.27)",
"threshold2": null,
"threshold2Color": "rgba(234, 112, 112, 0.22)"
},
"id": 1,
"interval": "> 60s",
"legend": {
"alignAsTable": true,
"avg": false,
"current": false,
"max": false,
"min": false,
"rightSide": true,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "connected",
"percentage": false,
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"span": 9,
"stack": false,
"steppedLine": false,
"targets": [
{
"alias": "$m",
"column": "value",
"dsType": "influxdb",
"function": "mean",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"0"
],
"type": "fill"
}
],
"groupByTags": [],
"measurement": "/apache_workers/",
"policy": "default",
"query": "SELECT mean(\"value\") FROM /apache_workers/ WHERE \"hostname\" = '$server' AND $timeFilter GROUP BY time($interval) fill(0)",
"rawQuery": false,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"value"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": [
{
"key": "hostname",
"value": "$server"
}
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "Workers states",
"tooltip": {
"msResolution": false,
"shared": true,
"value_type": "individual"
},
"transparent": false,
"type": "graph",
"xaxis": {
"show": true
},
"yaxes": [
{
"format": "short",
"logBase": 1,
"max": null,
"min": 0,
"show": true
},
{
"format": "short",
"logBase": 1,
"max": null,
"min": null,
"show": true
}
]
},
{
"cacheTimeout": null,
"colorBackground": false,
"colorValue": false,
"colors": [
"rgba(245, 54, 54, 0.9)",
"rgba(237, 129, 40, 0.89)",
"rgba(50, 172, 45, 0.97)"
],
"datasource": null,
"editable": true,
"error": false,
"format": "none",
"gauge": {
"maxValue": 100,
"minValue": 0,
"show": false,
"thresholdLabels": false,
"thresholdMarkers": true
},
"id": 4,
"interval": "> 60s",
"links": [],
"maxDataPoints": 100,
"nullPointMode": "connected",
"nullText": null,
"postfix": "",
"postfixFontSize": "50%",
"prefix": "",
"prefixFontSize": "50%",
"span": 3,
"sparkline": {
"fillColor": "rgba(31, 118, 189, 0.18)",
"full": false,
"lineColor": "rgb(31, 120, 193)",
"show": true
},
"targets": [
{
"column": "value",
"dsType": "influxdb",
"function": "last",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"groupByTags": [],
"measurement": "apache_idle_workers",
"policy": "default",
"query": "SELECT last(\"value\") FROM \"apache_idle_workers\" WHERE \"hostname\" = '$server' AND $timeFilter GROUP BY time($interval) fill(null)",
"rawQuery": false,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"value"
],
"type": "field"
},
{
"params": [],
"type": "last"
}
]
],
"tags": [
{
"key": "hostname",
"value": "$server"
}
]
}
],
"thresholds": "",
"title": "Current Idle Workers",
"type": "singlestat",
"valueFontSize": "80%",
"valueMaps": [
{
"op": "=",
"text": "N/A",
"value": "null"
}
],
"valueName": "current"
}
],
"title": "Metrics"
}
],
"schemaVersion": 12,
"sharedCrosshair": true,
"style": "dark",
"tags": [],
"templating": {
"enable": true,
"list": [
{
"allFormat": "regex values",
"current": {},
"datasource": null,
"hide": 0,
"includeAll": false,
"name": "environment",
"options": [],
"query": "show tag values from cpu_idle with key = environment_label",
"refresh": 1,
"refresh_on_load": true,
"regex": "",
"type": "query"
},
{
"allFormat": "glob",
"current": {},
"datasource": null,
"hide": 0,
"includeAll": false,
"name": "server",
"options": [],
"query": "show tag values from apache_requests with key = hostname where environment_label = '$environment'",
"refresh": 1,
"refresh_on_load": true,
"regex": "",
"type": "query"
}
]
},
"time": {
"from": "now-1h",
"to": "now"
},
"timepicker": {
"collapse": false,
"enable": true,
"notice": false,
"now": true,
"refresh_intervals": [
"5s",
"10s",
"30s",
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
],
"status": "Stable",
"time_options": [
"5m",
"15m",
"1h",
"6h",
"12h",
"24h",
"2d",
"7d",
"30d"
],
"type": "timepicker"
},
"timezone": "browser",
"title": "Apache",
"version": 2
}

18 file diffs suppressed because they are too large.

@@ -0,0 +1 @@
{{ service_grains|yaml(False) }}

@@ -25,7 +25,7 @@
# The ip address to bind to, empty will bind to all interfaces
http_addr = {{ server.bind.address }}
# The http port to use
# The http port to use
http_port = {{ server.bind.port }}
# The public facing domain name used to access grafana from a browser
@@ -55,21 +55,25 @@ http_port = {{ server.bind.port }}
[database]
# Either "mysql", "postgres" or "sqlite3", it's your choice
type = {% if server.database.engine == "postgresql" %}postgres{% else %}{{ server.database.engine }}{% endif %}
{%- if server.database.engine in ["postgresql", "mysql"] %}
host = {{ server.database.host }}:{{ server.database.port }}
name = {{ server.database.name }}
user = {{ server.database.user }}
password = {{ server.database.password }}
{%- endif %}
# For "postgres" only, either "disable", "require" or "verify-full"
;ssl_mode = disable
# For "sqlite3" only, path relative to data_path setting
;path = grafana.db
{%- if server.database.engine in ["sqlite"] %}
path = grafana.db
{%- endif %}
#################################### Session ####################################
[session]
# Either "memory", "file", "redis", "mysql", "postgres", default is "file"
provider = {{ server.get('session', {}).get('engine', 'file') }}
provider = {{ server.session.engine }}
# Provider config options
# memory: not have any config yet
@@ -77,7 +81,7 @@ provider = {{ server.get('session', {}).get('engine', 'file') }}
# redis: config like redis server e.g. `addr=127.0.0.1:6379,pool_size=100,db=grafana`
# mysql: go-sql-driver/mysql dsn config string, e.g. `user:password@tcp(127.0.0.1:3306)/database_name`
# postgres: user=a password=b host=localhost port=5432 dbname=c sslmode=disable
{%- if server.get('session', {}).get('engine', 'file') == 'redis' %}
{%- if server.session.engine == 'redis' %}
provider_config = addr={{ server.session.get('host', '127.0.0.1') }}:{{ server.session.get('port', 6379) }},db={{ server.session.get('db', 'grafana') }}
{%- endif %}
@@ -104,10 +108,10 @@ provider_config = addr={{ server.session.get('host', '127.0.0.1') }}:{{ server.s
#################################### Security ####################################
[security]
# default admin user, created on startup
;admin_user = admin
admin_user = {{ server.admin.user }}
# default admin password, can be changed before first start of grafana, or in profile settings
;admin_password = admin
admin_password = {{ server.admin.password }}
# used for signing
;secret_key = SW2YcwTIb9zpOOhoPsMm
@@ -126,21 +130,21 @@ provider_config = addr={{ server.session.get('host', '127.0.0.1') }}:{{ server.s
#################################### Users ####################################
[users]
# disable user signup / registration
allow_sign_up = {{ server.get('users', {}).get('sign_up', True)|lower }}
allow_sign_up = {{ server.allow_sign_up|lower }}
# Allow non admin users to create organizations
allow_org_create = {{ server.get('users', {}).get('org_create', True)|lower }}
allow_org_create = {{ server.allow_org_create|lower }}
# Set to true to automatically assign new users to the default organization (id 1)
;auto_assign_org = true
# Default role new users will be automatically assigned (if disabled above is set to true)
;auto_assign_org_role = Viewer
auto_assign_org_role = {{ server.get('users', {}).get('auto_assign_role', 'Viewer') }}
auto_assign_org_role = {{ server.auto_assign_role }}
#################################### Anonymous Auth ##########################
[auth.anonymous]
{%- if server.get('auth', {}).get('engine', None) == 'anonymous' %}
{%- if server.auth.engine == 'anonymous' %}
enabled = true
{%- if server.auth.organization is defined %}
@@ -189,7 +193,7 @@ org_name = {{ server.auth.role }}
#################################### Auth Proxy ##########################
[auth.proxy]
{%- if server.get('auth', {}).get('engine', None) == 'proxy' %}
{%- if server.auth.engine == 'proxy' %}
enabled = true
header_name = {{ server.auth.get('header', 'X-Forwarded-User') }}
header_property = {{ server.auth.get('header_property', 'username') }}
@@ -198,10 +202,10 @@ auto_sign_up = true
#################################### Basic Auth ##########################
[auth.basic]
{%- if server.get('auth', {}).get('engine', 'basic') != 'basic' %}
enabled = false
{%- else %}
{%- if server.auth.engine == 'basic' %}
enabled = true
{%- else %}
enabled = false
{%- endif %}
#################################### Auth LDAP ##########################
@@ -273,6 +277,11 @@ enabled = false
;#################################### Dashboard JSON files ##########################
[dashboards.json]
{%- if server.dashboards.enabled %}
enabled = true
path = {{ server.dashboards.path }}
{%- else %}
;enabled = false
;path = /var/lib/grafana/dashboards
{%- endif %}

@@ -4,4 +4,10 @@ include:
{%- if pillar.grafana.server is defined %}
- grafana.server
{%- endif %}
{%- if pillar.grafana.client is defined %}
- grafana.client
{%- endif %}
{%- if pillar.grafana.collector is defined %}
- grafana.collector
{%- endif %}
{%- endif %}

@@ -1,5 +1,5 @@
{%- load_yaml as base_defaults %}
{%- load_yaml as server_defaults %}
Debian:
pkgs:
- grafana
@@ -7,6 +7,41 @@ Debian:
bind:
address: 0.0.0.0
port: 3000
session:
engine: file
auth:
engine: application
admin:
user: admin
password: admin
allow_sign_up: False
allow_org_create: False
auto_assign_role: Viewer
dir:
static: /usr/share/grafana/public
dashboards:
enabled: false
{%- endload %}
{%- set server = salt['grains.filter_by'](base_defaults, merge=salt['pillar.get']('grafana:server')) %}
{%- set server = salt['grains.filter_by'](server_defaults, merge=salt['pillar.get']('grafana:server')) %}
{%- load_yaml as client_defaults %}
Debian:
server:
host: 127.0.0.1
port: 3000
remote_data:
engine: none
datasource: {}
dashboard: {}
{%- endload %}
{%- set client = salt['grains.filter_by'](client_defaults, merge=salt['pillar.get']('grafana:client')) %}
{%- load_yaml as collector_defaults %}
default:
storage:
engine: salt-mine
{%- endload %}
{%- set collector = salt['grains.filter_by'](collector_defaults, merge=salt['pillar.get']('grafana:collector')) %}

grafana/meta/grafana.yml (new file)

@@ -0,0 +1,45 @@
{%- if pillar.get('grafana').collector is defined %}
dashboard:
test-single-{{ grains.host }}:
title: Dashboard single {{ grains.host }}
editable: true
hideControls: false
row:
single:
title: Single row
height: 250px
showTitle: true
panel:
first:
title: Single Panel
span: 8
editable: false
type: graph
target:
A:
refId: A
target: "support_prd.cfg01_iot_tcpcloud_eu.cpu.0.idle"
datasource: graphite01
renderer: flot
test-merge:
title: Dashboard merge
editable: true
hideControls: false
row:
merge:
showTitle: true
title: Merge
height: 250px
panel:
merge:
title: Merge Panel
span: 8
editable: false
type: graph
target:
{{ grains.host }}:
refId: A
target: "support_prd.cfg01_iot_tcpcloud_eu.cpu.0.idle"
datasource: graphite01
renderer: flot
{%- endif %}

@@ -14,12 +14,55 @@ grafana_packages:
- require:
- pkg: grafana_packages
{%- if server.dashboards.enabled %}
grafana_copy_default_dashboards:
file.recurse:
- name: {{ server.dashboards.path }}
- source: salt://grafana/files/dashboards
- user: grafana
- group: grafana
- require:
- pkg: grafana_packages
- require_in:
- service: grafana_service
{%- endif %}
{%- for theme_name, theme in server.get('theme', {}).iteritems() %}
{%- if theme.css_override is defined %}
grafana_{{ theme_name }}_css_override:
file.managed:
- names:
- {{ server.dir.static }}/css/grafana.{{ theme_name }}.min.css
{%- if theme.css_override.build is defined %}
- {{ server.dir.static }}/css/grafana.{{ theme_name }}.min.{{ theme.css_override.build }}.css
{%- endif %}
- source: {{ theme.css_override.source }}
{%- if theme.css_override.source_hash is defined %}
- source_hash: {{ theme.css_override.source_hash }}
{%- endif %}
- user: grafana
- group: grafana
- require:
- pkg: grafana_packages
- require_in:
- service: grafana_service
{%- endif %}
{%- endfor %}
grafana_service:
service.running:
- name: {{ server.service }}
- enable: true
- reload: true
# It is needed if client is trying to set datasource or dashboards before
# server is ready.
- init_delay: 5
- watch:
- file: /etc/grafana/grafana.ini
{%- endif %}
{%- endif %}