Commit 51617df2 by John Donnal

Merge branch 'master' of git.wattsworth.net:wattsworth/puppet

parents 91ae9a87 43665687
Showing with 3108 additions and 795 deletions
.alert-info {
color: #31708f;
background-color: #d9edf7;
border-color: #bce8f1;
font-size: larger;
line-height: 2;
}
.alert {
padding: 5px;
margin-bottom: 20px;
border: 1px solid;
border-radius: 4px;
}
/* Bootstrap table styling */
table {
background-color: transparent;
}
caption {
padding-top: 8px;
padding-bottom: 8px;
color: #777777;
text-align: left;
}
th {
text-align: left;
}
.table {
width: 100%;
max-width: 100%;
margin-bottom: 20px;
}
.table>thead>tr>th, .table>tbody>tr>th, .table>tfoot>tr>th, .table>thead>tr>td, .table>tbody>tr>td, .table>tfoot>tr>td {
padding: 8px;
line-height: 1.42857143;
vertical-align: top;
border-top: 1px solid #ddd;
}
.table>thead>tr>th {
vertical-align: bottom;
border-bottom: 2px solid #ddd;
}
.table>caption+thead>tr:first-child>th, .table>colgroup+thead>tr:first-child>th, .table>thead:first-child>tr:first-child>th, .table>caption+thead>tr:first-child>td, .table>colgroup+thead>tr:first-child>td, .table>thead:first-child>tr:first-child>td {
border-top: 0;
}
.table>tbody+tbody {
border-top: 2px solid #ddd;
}
.table .table {
background-color: #fff;
}
.table-condensed>thead>tr>th, .table-condensed>tbody>tr>th, .table-condensed>tfoot>tr>th, .table-condensed>thead>tr>td, .table-condensed>tbody>tr>td, .table-condensed>tfoot>tr>td {
padding: 5px;
}
.table-bordered {
border: 1px solid #ddd;
}
.table-bordered>thead>tr>th, .table-bordered>tbody>tr>th, .table-bordered>tfoot>tr>th, .table-bordered>thead>tr>td, .table-bordered>tbody>tr>td, .table-bordered>tfoot>tr>td {
border: 1px solid #ddd;
}
.table-bordered>thead>tr>th, .table-bordered>thead>tr>td {
border-bottom-width: 2px;
}
.table-striped>tbody>tr:nth-of-type(odd) {
background-color: #f9f9f9;
}
.table-hover>tbody>tr:hover {
background-color: #f5f5f5;
}
table col[class*="col-"] {
position: static;
float: none;
display: table-column;
}
table td[class*="col-"], table th[class*="col-"] {
position: static;
float: none;
display: table-cell;
}
.table>thead>tr>td.active, .table>tbody>tr>td.active, .table>tfoot>tr>td.active, .table>thead>tr>th.active, .table>tbody>tr>th.active, .table>tfoot>tr>th.active, .table>thead>tr.active>td, .table>tbody>tr.active>td, .table>tfoot>tr.active>td, .table>thead>tr.active>th, .table>tbody>tr.active>th, .table>tfoot>tr.active>th {
background-color: #f5f5f5;
}
.table-hover>tbody>tr>td.active:hover, .table-hover>tbody>tr>th.active:hover, .table-hover>tbody>tr.active:hover>td, .table-hover>tbody>tr:hover>.active, .table-hover>tbody>tr.active:hover>th {
background-color: #e8e8e8;
}
.table>thead>tr>td.success, .table>tbody>tr>td.success, .table>tfoot>tr>td.success, .table>thead>tr>th.success, .table>tbody>tr>th.success, .table>tfoot>tr>th.success, .table>thead>tr.success>td, .table>tbody>tr.success>td, .table>tfoot>tr.success>td, .table>thead>tr.success>th, .table>tbody>tr.success>th, .table>tfoot>tr.success>th {
background-color: #dff0d8;
}
.table-hover>tbody>tr>td.success:hover, .table-hover>tbody>tr>th.success:hover, .table-hover>tbody>tr.success:hover>td, .table-hover>tbody>tr:hover>.success, .table-hover>tbody>tr.success:hover>th {
background-color: #d0e9c6;
}
.table>thead>tr>td.info, .table>tbody>tr>td.info, .table>tfoot>tr>td.info, .table>thead>tr>th.info, .table>tbody>tr>th.info, .table>tfoot>tr>th.info, .table>thead>tr.info>td, .table>tbody>tr.info>td, .table>tfoot>tr.info>td, .table>thead>tr.info>th, .table>tbody>tr.info>th, .table>tfoot>tr.info>th {
background-color: #d9edf7;
}
.table-hover>tbody>tr>td.info:hover, .table-hover>tbody>tr>th.info:hover, .table-hover>tbody>tr.info:hover>td, .table-hover>tbody>tr:hover>.info, .table-hover>tbody>tr.info:hover>th {
background-color: #c4e3f3;
}
.table>thead>tr>td.warning, .table>tbody>tr>td.warning, .table>tfoot>tr>td.warning, .table>thead>tr>th.warning, .table>tbody>tr>th.warning, .table>tfoot>tr>th.warning, .table>thead>tr.warning>td, .table>tbody>tr.warning>td, .table>tfoot>tr.warning>td, .table>thead>tr.warning>th, .table>tbody>tr.warning>th, .table>tfoot>tr.warning>th {
background-color: #fcf8e3;
}
.table-hover>tbody>tr>td.warning:hover, .table-hover>tbody>tr>th.warning:hover, .table-hover>tbody>tr.warning:hover>td, .table-hover>tbody>tr:hover>.warning, .table-hover>tbody>tr.warning:hover>th {
background-color: #faf2cc;
}
.table>thead>tr>td.danger, .table>tbody>tr>td.danger, .table>tfoot>tr>td.danger, .table>thead>tr>th.danger, .table>tbody>tr>th.danger, .table>tfoot>tr>th.danger, .table>thead>tr.danger>td, .table>tbody>tr.danger>td, .table>tfoot>tr.danger>td, .table>thead>tr.danger>th, .table>tbody>tr.danger>th, .table>tfoot>tr.danger>th {
background-color: #f2dede;
}
.table-hover>tbody>tr>td.danger:hover, .table-hover>tbody>tr>th.danger:hover, .table-hover>tbody>tr.danger:hover>td, .table-hover>tbody>tr:hover>.danger, .table-hover>tbody>tr.danger:hover>th {
background-color: #ebcccc;
}
.table-responsive {
overflow-x: auto;
min-height: 0.01%;
}
# append plugins to the list below, separate entries with commas
# name, description, documentation_folder
#LabJack, Acquire data from UE9 devices, labjack
#NILM, Non-Intrusive Load Monitoring, nilm
#SmartEE, Connect to Smart Plugs, smartee
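For illustration, enabling one of the commented entries above is just a matter of appending an uncommented line in the same `name, description, documentation_folder` format (the LabJack values are reused from the comments; the file path depends on where your docs build serves `_static/plugins.txt`):

```shell
# hypothetical: activate the LabJack plugin so the index page script lists it
% echo 'LabJack, Acquire data from UE9 devices, labjack' >> plugins.txt
```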
@@ -88,7 +88,10 @@ html_theme = 'alabaster'
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
html_theme_options = {
"logo": "logo.png",
"show_powered_by": True
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
@@ -152,5 +155,5 @@ texinfo_documents = [
'Miscellaneous'),
]
def setup(app):
app.add_stylesheet('css/custom.css') # may also be a URL
@@ -6,6 +6,32 @@
The Wattsworth Project
======================
.. raw:: html
<div class="alert alert-info">
See <a href="/joule">Joule</a> for information on the modular data processing framework
<br/>
See <a href="/web">Web</a> for information on the frontend user interface
</div>
Active Plugins
--------------
The following plugins are active on this machine.
Click the name of a plugin to view the documentation.
.. raw:: html
<table class="table">
<thead>
<tr><th>Name</th><th>Description</th></tr>
</thead>
<tbody id="plugins-table">
</tbody>
</table>
.. raw:: html
Installing the Software
-----------------------
@@ -25,9 +51,30 @@ Use the Puppet repository to install the complete Wattsworth stack
$> cd puppet
$> sudo puppet apply --modulepath=./modules --verbose site.pp
Here is info about Joule and the Web Frontend
Another edit
System Configuration
--------------------
Make sure you set up data journaling on the nilmdb partition. This will prevent
data corruption if the computer loses power without properly shutting down.
Edit ``/etc/fstab`` and add the ``data=journal`` option to the partition with the
nilmdb database.
.. code-block:: bash
# /etc/fstab: static file system information.
#
# <file system> <mount point> <type> <options> <dump> <pass>
UUID=XXX / ext4 errors=remount-ro,data=journal 0 1
# add this --^
If the partition is the root partition, you must add this option directly to the volume
as well; otherwise the system will not boot properly. For example, if your
root partition is on ``/dev/sda2``:
.. code-block:: bash
$> sudo tune2fs -o journal_data /dev/sda2
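As an optional sanity check (not part of the original steps; the device name is
illustrative and the exact option list varies by filesystem), you can read the
default mount options back with ``tune2fs -l`` and confirm ``journal_data`` is listed:
.. code-block:: bash
$> sudo tune2fs -l /dev/sda2 | grep 'Default mount options'
Default mount options:    user_xattr acl journal_data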
.. toctree::
:maxdepth: 2
@@ -41,3 +88,36 @@ Indices and tables
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
.. _web: /web
.. _joule: /joule
.. raw:: html
<script type="text/javascript">
//hide the plugins section by default (in case plugins.txt is missing)
$("#active-plugins").hide();
var plugins="";
var nonce=new Date().getTime(); //so we force the browser to get new data
$.ajax({url: `_static/plugins.txt?nonce=${nonce}`, success:
function(result){
plugins = result.split('\n')
.reduce(function(acc,line){
if(line==""||line[0]=="#")
return acc;
acc.push(line.split(',').map(function(x){return x.trim()}));
return acc;
},[])
if(plugins.length>0)
$("#active-plugins").show();
plugins.map(function(plugin){
var link = plugin[2];
var name=`<a href="/${link}">${plugin[0]}</a>`;
var desc = plugin[1];
$("#plugins-table").append(`<tr><td>${name}</td><td>${desc}</td></tr>`)
})
}});
</script>
# Sphinx build info version 1
# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
config: f03ccff803878db812e11b9e81b5e0f4
tags: 645f666f9bcd5a90fca523b33c5a78b7
.. Wattsworth documentation master file, created by
sphinx-quickstart on Tue Aug 1 11:55:55 2017.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
The Wattsworth Project
======================
Installing the Software
-----------------------
All of the software repositories are available at
https://git.wattsworth.net/wattsworth. The software has been tested on
64-bit Ubuntu Linux. While it is possible to run on ARM-based single-board
computers (e.g. Raspberry Pi), the software works best on x86 systems
such as the Intel NUC.
Use the Puppet repository to install the complete Wattsworth stack
.. code-block:: bash
$> sudo apt-get update
$> sudo apt-get install puppet
$> git clone https://git.wattsworth.net/wattsworth/puppet.git
$> cd puppet
$> sudo puppet apply --modulepath=./modules --verbose site.pp
Here is info about Joule and the Web Frontend
Another edit
.. toctree::
:maxdepth: 2
:caption: Contents:
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
/* This file intentionally left blank. */
/*
* doctools.js
* ~~~~~~~~~~~
*
* Sphinx JavaScript utilities for all documentation.
*
* :copyright: Copyright 2007-2017 by the Sphinx team, see AUTHORS.
* :license: BSD, see LICENSE for details.
*
*/
/**
* select a different prefix for underscore
*/
$u = _.noConflict();
/**
* make the code below compatible with browsers without
* an installed firebug like debugger
if (!window.console || !console.firebug) {
var names = ["log", "debug", "info", "warn", "error", "assert", "dir",
"dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace",
"profile", "profileEnd"];
window.console = {};
for (var i = 0; i < names.length; ++i)
window.console[names[i]] = function() {};
}
*/
/**
* small helper function to urldecode strings
*/
jQuery.urldecode = function(x) {
return decodeURIComponent(x).replace(/\+/g, ' ');
};
/**
* small helper function to urlencode strings
*/
jQuery.urlencode = encodeURIComponent;
/**
* This function returns the parsed url parameters of the
* current request. Multiple values per key are supported,
* it will always return arrays of strings for the value parts.
*/
jQuery.getQueryParameters = function(s) {
if (typeof s == 'undefined')
s = document.location.search;
var parts = s.substr(s.indexOf('?') + 1).split('&');
var result = {};
for (var i = 0; i < parts.length; i++) {
var tmp = parts[i].split('=', 2);
var key = jQuery.urldecode(tmp[0]);
var value = jQuery.urldecode(tmp[1]);
if (key in result)
result[key].push(value);
else
result[key] = [value];
}
return result;
};
/**
* highlight a given string on a jquery object by wrapping it in
* span elements with the given class name.
*/
jQuery.fn.highlightText = function(text, className) {
function highlight(node) {
if (node.nodeType == 3) {
var val = node.nodeValue;
var pos = val.toLowerCase().indexOf(text);
if (pos >= 0 && !jQuery(node.parentNode).hasClass(className)) {
var span = document.createElement("span");
span.className = className;
span.appendChild(document.createTextNode(val.substr(pos, text.length)));
node.parentNode.insertBefore(span, node.parentNode.insertBefore(
document.createTextNode(val.substr(pos + text.length)),
node.nextSibling));
node.nodeValue = val.substr(0, pos);
}
}
else if (!jQuery(node).is("button, select, textarea")) {
jQuery.each(node.childNodes, function() {
highlight(this);
});
}
}
return this.each(function() {
highlight(this);
});
};
/*
* backward compatibility for jQuery.browser
* This will be supported until firefox bug is fixed.
*/
if (!jQuery.browser) {
jQuery.uaMatch = function(ua) {
ua = ua.toLowerCase();
var match = /(chrome)[ \/]([\w.]+)/.exec(ua) ||
/(webkit)[ \/]([\w.]+)/.exec(ua) ||
/(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) ||
/(msie) ([\w.]+)/.exec(ua) ||
ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) ||
[];
return {
browser: match[ 1 ] || "",
version: match[ 2 ] || "0"
};
};
jQuery.browser = {};
jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true;
}
/**
* Small JavaScript module for the documentation.
*/
var Documentation = {
init : function() {
this.fixFirefoxAnchorBug();
this.highlightSearchWords();
this.initIndexTable();
},
/**
* i18n support
*/
TRANSLATIONS : {},
PLURAL_EXPR : function(n) { return n == 1 ? 0 : 1; },
LOCALE : 'unknown',
// gettext and ngettext don't access this so that the functions
// can safely bound to a different name (_ = Documentation.gettext)
gettext : function(string) {
var translated = Documentation.TRANSLATIONS[string];
if (typeof translated == 'undefined')
return string;
return (typeof translated == 'string') ? translated : translated[0];
},
ngettext : function(singular, plural, n) {
var translated = Documentation.TRANSLATIONS[singular];
if (typeof translated == 'undefined')
return (n == 1) ? singular : plural;
return translated[Documentation.PLURAL_EXPR(n)];
},
addTranslations : function(catalog) {
for (var key in catalog.messages)
this.TRANSLATIONS[key] = catalog.messages[key];
this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')');
this.LOCALE = catalog.locale;
},
/**
* add context elements like header anchor links
*/
addContextElements : function() {
$('div[id] > :header:first').each(function() {
$('<a class="headerlink">\u00B6</a>').
attr('href', '#' + this.id).
attr('title', _('Permalink to this headline')).
appendTo(this);
});
$('dt[id]').each(function() {
$('<a class="headerlink">\u00B6</a>').
attr('href', '#' + this.id).
attr('title', _('Permalink to this definition')).
appendTo(this);
});
},
/**
* workaround a firefox stupidity
* see: https://bugzilla.mozilla.org/show_bug.cgi?id=645075
*/
fixFirefoxAnchorBug : function() {
if (document.location.hash)
window.setTimeout(function() {
document.location.href += '';
}, 10);
},
/**
* highlight the search words provided in the url in the text
*/
highlightSearchWords : function() {
var params = $.getQueryParameters();
var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : [];
if (terms.length) {
var body = $('div.body');
if (!body.length) {
body = $('body');
}
window.setTimeout(function() {
$.each(terms, function() {
body.highlightText(this.toLowerCase(), 'highlighted');
});
}, 10);
$('<p class="highlight-link"><a href="javascript:Documentation.' +
'hideSearchWords()">' + _('Hide Search Matches') + '</a></p>')
.appendTo($('#searchbox'));
}
},
/**
* init the domain index toggle buttons
*/
initIndexTable : function() {
var togglers = $('img.toggler').click(function() {
var src = $(this).attr('src');
var idnum = $(this).attr('id').substr(7);
$('tr.cg-' + idnum).toggle();
if (src.substr(-9) == 'minus.png')
$(this).attr('src', src.substr(0, src.length-9) + 'plus.png');
else
$(this).attr('src', src.substr(0, src.length-8) + 'minus.png');
}).css('display', '');
if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) {
togglers.click();
}
},
/**
* helper function to hide the search marks again
*/
hideSearchWords : function() {
$('#searchbox .highlight-link').fadeOut(300);
$('span.highlighted').removeClass('highlighted');
},
/**
* make the url absolute
*/
makeURL : function(relativeURL) {
return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL;
},
/**
* get the current relative url
*/
getCurrentURL : function() {
var path = document.location.pathname;
var parts = path.split(/\//);
$.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() {
if (this == '..')
parts.pop();
});
var url = parts.join('/');
return path.substring(url.lastIndexOf('/') + 1, path.length - 1);
},
initOnKeyListeners: function() {
$(document).keyup(function(event) {
var activeElementType = document.activeElement.tagName;
// don't navigate when in search box or textarea
if (activeElementType !== 'TEXTAREA' && activeElementType !== 'INPUT' && activeElementType !== 'SELECT') {
switch (event.keyCode) {
case 37: // left
var prevHref = $('link[rel="prev"]').prop('href');
if (prevHref) {
window.location.href = prevHref;
return false;
}
case 39: // right
var nextHref = $('link[rel="next"]').prop('href');
if (nextHref) {
window.location.href = nextHref;
return false;
}
}
}
});
}
};
// quick alias for translations
_ = Documentation.gettext;
$(document).ready(function() {
Documentation.init();
});
\ No newline at end of file
This diff could not be displayed because it is too large.
.highlight .hll { background-color: #ffffcc }
.highlight { background: #eeffcc; }
.highlight .c { color: #408090; font-style: italic } /* Comment */
.highlight .err { border: 1px solid #FF0000 } /* Error */
.highlight .k { color: #007020; font-weight: bold } /* Keyword */
.highlight .o { color: #666666 } /* Operator */
.highlight .ch { color: #408090; font-style: italic } /* Comment.Hashbang */
.highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */
.highlight .cp { color: #007020 } /* Comment.Preproc */
.highlight .cpf { color: #408090; font-style: italic } /* Comment.PreprocFile */
.highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */
.highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */
.highlight .gd { color: #A00000 } /* Generic.Deleted */
.highlight .ge { font-style: italic } /* Generic.Emph */
.highlight .gr { color: #FF0000 } /* Generic.Error */
.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */
.highlight .gi { color: #00A000 } /* Generic.Inserted */
.highlight .go { color: #333333 } /* Generic.Output */
.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */
.highlight .gs { font-weight: bold } /* Generic.Strong */
.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
.highlight .gt { color: #0044DD } /* Generic.Traceback */
.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */
.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */
.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */
.highlight .kp { color: #007020 } /* Keyword.Pseudo */
.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */
.highlight .kt { color: #902000 } /* Keyword.Type */
.highlight .m { color: #208050 } /* Literal.Number */
.highlight .s { color: #4070a0 } /* Literal.String */
.highlight .na { color: #4070a0 } /* Name.Attribute */
.highlight .nb { color: #007020 } /* Name.Builtin */
.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */
.highlight .no { color: #60add5 } /* Name.Constant */
.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */
.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */
.highlight .ne { color: #007020 } /* Name.Exception */
.highlight .nf { color: #06287e } /* Name.Function */
.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */
.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */
.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */
.highlight .nv { color: #bb60d5 } /* Name.Variable */
.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */
.highlight .w { color: #bbbbbb } /* Text.Whitespace */
.highlight .mb { color: #208050 } /* Literal.Number.Bin */
.highlight .mf { color: #208050 } /* Literal.Number.Float */
.highlight .mh { color: #208050 } /* Literal.Number.Hex */
.highlight .mi { color: #208050 } /* Literal.Number.Integer */
.highlight .mo { color: #208050 } /* Literal.Number.Oct */
.highlight .sa { color: #4070a0 } /* Literal.String.Affix */
.highlight .sb { color: #4070a0 } /* Literal.String.Backtick */
.highlight .sc { color: #4070a0 } /* Literal.String.Char */
.highlight .dl { color: #4070a0 } /* Literal.String.Delimiter */
.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */
.highlight .s2 { color: #4070a0 } /* Literal.String.Double */
.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */
.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */
.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */
.highlight .sx { color: #c65d09 } /* Literal.String.Other */
.highlight .sr { color: #235388 } /* Literal.String.Regex */
.highlight .s1 { color: #4070a0 } /* Literal.String.Single */
.highlight .ss { color: #517918 } /* Literal.String.Symbol */
.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */
.highlight .fm { color: #06287e } /* Name.Function.Magic */
.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */
.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */
.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */
.highlight .vm { color: #bb60d5 } /* Name.Variable.Magic */
.highlight .il { color: #208050 } /* Literal.Number.Integer.Long */
\ No newline at end of file
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>Index &#8212; Wattsworth 1.0 documentation</title>
<link rel="stylesheet" href="_static/alabaster.css" type="text/css" />
<link rel="stylesheet" href="_static/pygments.css" type="text/css" />
<script type="text/javascript">
var DOCUMENTATION_OPTIONS = {
URL_ROOT: './',
VERSION: '1.0',
COLLAPSE_INDEX: false,
FILE_SUFFIX: '.html',
HAS_SOURCE: true,
SOURCELINK_SUFFIX: '.txt'
};
</script>
<script type="text/javascript" src="_static/jquery.js"></script>
<script type="text/javascript" src="_static/underscore.js"></script>
<script type="text/javascript" src="_static/doctools.js"></script>
<link rel="index" title="Index" href="#" />
<link rel="search" title="Search" href="search.html" />
<link rel="stylesheet" href="_static/custom.css" type="text/css" />
<meta name="viewport" content="width=device-width, initial-scale=0.9, maximum-scale=0.9" />
</head>
<body>
<div class="document">
<div class="documentwrapper">
<div class="bodywrapper">
<div class="body" role="main">
<h1 id="index">Index</h1>
<div class="genindex-jumpbox">
</div>
</div>
</div>
</div>
<div class="sphinxsidebar" role="navigation" aria-label="main navigation">
<div class="sphinxsidebarwrapper">
<div class="relations">
<h3>Related Topics</h3>
<ul>
<li><a href="index.html">Documentation overview</a><ul>
</ul></li>
</ul>
</div>
<div id="searchbox" style="display: none" role="search">
<h3>Quick search</h3>
<form class="search" action="search.html" method="get">
<div><input type="text" name="q" /></div>
<div><input type="submit" value="Go" /></div>
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
</div>
<script type="text/javascript">$('#searchbox').show(0);</script>
</div>
</div>
<div class="clearer"></div>
</div>
<div class="footer">
&copy;2017, John Donnal, James Paris.
|
Powered by <a href="http://sphinx-doc.org/">Sphinx 1.6.2</a>
&amp; <a href="https://github.com/bitprophet/alabaster">Alabaster 0.7.10</a>
</div>
</body>
</html>
\ No newline at end of file
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>The Wattsworth Project &#8212; Wattsworth 1.0 documentation</title>
<link rel="stylesheet" href="_static/alabaster.css" type="text/css" />
<link rel="stylesheet" href="_static/pygments.css" type="text/css" />
<script type="text/javascript">
var DOCUMENTATION_OPTIONS = {
URL_ROOT: './',
VERSION: '1.0',
COLLAPSE_INDEX: false,
FILE_SUFFIX: '.html',
HAS_SOURCE: true,
SOURCELINK_SUFFIX: '.txt'
};
</script>
<script type="text/javascript" src="_static/jquery.js"></script>
<script type="text/javascript" src="_static/underscore.js"></script>
<script type="text/javascript" src="_static/doctools.js"></script>
<link rel="index" title="Index" href="genindex.html" />
<link rel="search" title="Search" href="search.html" />
<link rel="stylesheet" href="_static/custom.css" type="text/css" />
<meta name="viewport" content="width=device-width, initial-scale=0.9, maximum-scale=0.9" />
</head>
<body>
<div class="document">
<div class="documentwrapper">
<div class="bodywrapper">
<div class="body" role="main">
<div class="section" id="the-wattsworth-project">
<h1>The Wattsworth Project<a class="headerlink" href="#the-wattsworth-project" title="Permalink to this headline"></a></h1>
<div class="section" id="installing-the-software">
<h2>Installing the Software<a class="headerlink" href="#installing-the-software" title="Permalink to this headline"></a></h2>
<p>All of the software repositories are available at
<a class="reference external" href="https://git.wattsworth.net/wattsworth">https://git.wattsworth.net/wattsworth</a>. The software has been tested on
64-bit Ubuntu Linux. While it is possible to run on ARM-based single-board
computers (e.g. Raspberry Pi), the software works best on x86 systems
such as the Intel NUC.</p>
<p>Use the Puppet repository to install the complete Wattsworth stack</p>
<div class="highlight-bash"><div class="highlight"><pre><span></span>$&gt; sudo apt-get update
$&gt; sudo apt-get install puppet
$&gt; git clone https://git.wattsworth.net/wattsworth/puppet.git
$&gt; <span class="nb">cd</span> puppet
$&gt; sudo puppet apply --modulepath<span class="o">=</span>./modules --verbose site.pp
</pre></div>
</div>
<p>Here is info about Joule and the Web Frontend
Another edit</p>
<div class="toctree-wrapper compound">
</div>
</div>
</div>
<div class="section" id="indices-and-tables">
<h1>Indices and tables<a class="headerlink" href="#indices-and-tables" title="Permalink to this headline"></a></h1>
<ul class="simple">
<li><a class="reference internal" href="genindex.html"><span class="std std-ref">Index</span></a></li>
<li><a class="reference internal" href="py-modindex.html"><span class="std std-ref">Module Index</span></a></li>
<li><a class="reference internal" href="search.html"><span class="std std-ref">Search Page</span></a></li>
</ul>
</div>
</div>
</div>
</div>
<div class="sphinxsidebar" role="navigation" aria-label="main navigation">
<div class="sphinxsidebarwrapper">
<h3><a href="#">Table Of Contents</a></h3>
<ul>
<li><a class="reference internal" href="#">The Wattsworth Project</a><ul>
<li><a class="reference internal" href="#installing-the-software">Installing the Software</a></li>
</ul>
</li>
<li><a class="reference internal" href="#indices-and-tables">Indices and tables</a></li>
</ul>
<div class="relations">
<h3>Related Topics</h3>
<ul>
<li><a href="#">Documentation overview</a><ul>
</ul></li>
</ul>
</div>
<div role="note" aria-label="source link">
<h3>This Page</h3>
<ul class="this-page-menu">
<li><a href="_sources/index.rst.txt"
rel="nofollow">Show Source</a></li>
</ul>
</div>
<div id="searchbox" style="display: none" role="search">
<h3>Quick search</h3>
<form class="search" action="search.html" method="get">
<div><input type="text" name="q" /></div>
<div><input type="submit" value="Go" /></div>
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
</div>
<script type="text/javascript">$('#searchbox').show(0);</script>
</div>
</div>
<div class="clearer"></div>
</div>
<div class="footer">
&copy;2017, John Donnal, James Paris.
|
Powered by <a href="http://sphinx-doc.org/">Sphinx 1.6.2</a>
&amp; <a href="https://github.com/bitprophet/alabaster">Alabaster 0.7.10</a>
|
<a href="_sources/index.rst.txt"
rel="nofollow">Page source</a>
</div>
</body>
</html>
\ No newline at end of file
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>Search &#8212; Wattsworth 1.0 documentation</title>
<link rel="stylesheet" href="_static/alabaster.css" type="text/css" />
<link rel="stylesheet" href="_static/pygments.css" type="text/css" />
<script type="text/javascript">
var DOCUMENTATION_OPTIONS = {
URL_ROOT: './',
VERSION: '1.0',
COLLAPSE_INDEX: false,
FILE_SUFFIX: '.html',
HAS_SOURCE: true,
SOURCELINK_SUFFIX: '.txt'
};
</script>
<script type="text/javascript" src="_static/jquery.js"></script>
<script type="text/javascript" src="_static/underscore.js"></script>
<script type="text/javascript" src="_static/doctools.js"></script>
<script type="text/javascript" src="_static/searchtools.js"></script>
<link rel="index" title="Index" href="genindex.html" />
<link rel="search" title="Search" href="#" />
<script type="text/javascript">
jQuery(function() { Search.loadIndex("searchindex.js"); });
</script>
<script type="text/javascript" id="searchindexloader"></script>
<link rel="stylesheet" href="_static/custom.css" type="text/css" />
<meta name="viewport" content="width=device-width, initial-scale=0.9, maximum-scale=0.9" />
</head>
<body>
<div class="document">
<div class="documentwrapper">
<div class="bodywrapper">
<div class="body" role="main">
<h1 id="search-documentation">Search</h1>
<div id="fallback" class="admonition warning">
<script type="text/javascript">$('#fallback').hide();</script>
<p>
Please activate JavaScript to enable the search
functionality.
</p>
</div>
<p>
From here you can search these documents. Enter your search
words into the box below and click "search". Note that the search
function will automatically search for all of the words. Pages
containing fewer words won't appear in the result list.
</p>
<form action="" method="get">
<input type="text" name="q" value="" />
<input type="submit" value="search" />
<span id="search-progress" style="padding-left: 10px"></span>
</form>
<div id="search-results">
</div>
</div>
</div>
</div>
<div class="sphinxsidebar" role="navigation" aria-label="main navigation">
<div class="sphinxsidebarwrapper"><div class="relations">
<h3>Related Topics</h3>
<ul>
<li><a href="index.html">Documentation overview</a><ul>
</ul></li>
</ul>
</div>
</div>
</div>
<div class="clearer"></div>
</div>
<div class="footer">
&copy;2017, John Donnal, James Paris.
|
Powered by <a href="http://sphinx-doc.org/">Sphinx 1.6.2</a>
&amp; <a href="https://github.com/bitprophet/alabaster">Alabaster 0.7.10</a>
</div>
</body>
</html>
\ No newline at end of file
Search.setIndex({docnames:["index"],envversion:52,filenames:["index.rst"],objects:{},objnames:{},objtypes:{},terms:{"while":0,Use:0,about:0,all:0,anoth:0,appli:0,apt:0,arm:0,avail:0,base:0,bash:[],been:0,best:0,bit:0,block:[],board:0,clone:0,code:[],complet:0,comput:0,edit:0,frontend:0,get:0,git:0,has:0,here:0,http:0,index:0,info:0,intel:0,joul:0,linux:0,modul:0,modulepath:0,modules_path:[],net:0,nuc:0,page:0,possibl:0,puppet:0,raspberri:0,repositori:0,run:0,search:0,singl:0,site:0,stack:0,sudo:0,system:0,test:0,ubuntu:0,updat:0,verbos:0,web:0,work:0,x86:0},titles:["The Wattsworth Project"],titleterms:{The:0,document:[],indic:0,instal:0,project:0,softwar:0,tabl:0,wattsworth:0,welcom:[]}})
\ No newline at end of file
.. Wattsworth documentation master file, created by
sphinx-quickstart on Tue Aug 1 11:55:55 2017.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
The Wattsworth Project
======================
Installing the Software
-----------------------
All of the software repositories are available at
https://git.wattsworth.net/wattsworth. The software has been tested on
64-bit Ubuntu Linux. While it is possible to run on ARM-based single-board
computers (e.g. Raspberry Pi), the software works best on x86 systems
such as the Intel NUC.
Use the Puppet repository to install the complete Wattsworth stack
.. code-block:: bash
$> sudo apt-get update
$> sudo apt-get install puppet
$> git clone https://git.wattsworth.net/wattsworth/puppet.git
$> cd puppet
$> sudo puppet apply --modulepath=./modules --verbose site.pp
Here is info about Joule and the Web Frontend
Another edit
.. toctree::
:maxdepth: 2
:caption: Contents:
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
@@ -3,7 +3,7 @@
# Packages required by multiple modules
#
class common {
$pkgs=['build-essential','screen','octave','emacs','openvpn']
$pkgs=['build-essential','screen','octave','emacs','openvpn', 'gnome-tweak-tool', 'nmap']
package { $pkgs:
ensure => present
......
[Unit]
Description=check and repair nilmdb database
Before=apache2.service
[Service]
Type=oneshot
StandardOutput=journal
ExecStart = /usr/local/bin/nilmdb-fsck -f -n /opt/nilmdb/db
[Install]
WantedBy=multi-user.target
\ No newline at end of file
@@ -37,6 +37,7 @@ class nilmdb {
group => root,
mode => '0644',
}
exec{'a2ensite 10-nilmdb.conf':
creates => '/etc/apache2/sites-enabled/10-nilmdb.conf',
path => ['/usr/sbin','/usr/bin'],
@@ -67,6 +68,19 @@ class nilmdb {
source => 'puppet:///modules/nilmdb/nilmdb.wsgi',
notify => Exec['nilmdb_restart_apache']
}
file{'/etc/systemd/system/nilmdb-fsck.service':
ensure => present,
owner => root,
group => root,
mode => '0644',
source => 'puppet:///modules/nilmdb/nilmdb-fsck.service',
notify => Exec['install_nilmdb_fsck_service']
}
exec{'install_nilmdb_fsck_service':
command => '/bin/systemctl enable nilmdb-fsck.service',
refreshonly => true,
}
#mod_wsgi tries to access user home directory
#nilmdb is a system account but just create a stub
#directory to satisfy wsgi :P
......
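After a Puppet run, a quick manual check that the oneshot fsck unit was actually enabled (illustrative commands; the unit name comes from the service file above) might look like:

```shell
# confirm the exec above enabled the unit
% systemctl is-enabled nilmdb-fsck.service
enabled
# inspect its most recent run via the journal
% journalctl -u nilmdb-fsck.service --no-pager
```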
source 'https://rubygems.org'
puppetversion = ENV.key?('PUPPET_VERSION') ? "= #{ENV['PUPPET_VERSION']}" : ['>= 3.3']
gem 'puppet', puppetversion
gem 'puppetlabs_spec_helper', '>= 0.1.0'
gem 'puppet-lint', '>= 0.3.2'
gem 'facter', '>= 1.7.0'
# railsapi
#### Table of Contents
1. [Overview](#overview)
2. [Module Description - What the module does and why it is useful](#module-description)
3. [Setup - The basics of getting started with railsapi](#setup)
* [What railsapi affects](#what-railsapi-affects)
* [Setup requirements](#setup-requirements)
* [Beginning with railsapi](#beginning-with-railsapi)
4. [Usage - Configuration options and additional functionality](#usage)
5. [Reference - An under-the-hood peek at what the module is doing and how](#reference)
6. [Limitations - OS compatibility, etc.](#limitations)
7. [Development - Guide for contributing to the module](#development)
## Overview
A one-maybe-two sentence summary of what the module does/what problem it solves.
This is your 30 second elevator pitch for your module. Consider including
OS/Puppet version it works with.
## Module Description
If applicable, this section should have a brief description of the technology
the module integrates with and what that integration enables. This section
should answer the questions: "What does this module *do*?" and "Why would I use
it?"
If your module has a range of functionality (installation, configuration,
management, etc.) this is the time to mention it.
## Setup
### What railsapi affects
* A list of files, packages, services, or operations that the module will alter,
impact, or execute on the system it's installed on.
* This is a great place to stick any warnings.
* Can be in list or paragraph form.
### Setup Requirements **OPTIONAL**
If your module requires anything extra before setting up (pluginsync enabled,
etc.), mention it here.
### Beginning with railsapi
The very basic steps needed for a user to get the module up and running.
If your most recent release breaks compatibility or requires particular steps
for upgrading, you may wish to include an additional section here: Upgrading
(For an example, see http://forge.puppetlabs.com/puppetlabs/firewall).
## Usage
Put the classes, types, and resources for customizing, configuring, and doing
the fancy stuff with your module here.
## Reference
Here, list the classes, types, providers, facts, etc contained in your module.
This section should include all of the under-the-hood workings of your module so
people know what the module is touching on their system but don't need to mess
with things. (We are working on automating this section!)
## Limitations
This is where you list OS compatibility, version compatibility, etc.
## Development
Since your module is awesome, other users will want to play with it. Let them
know what the ground rules for contributing are.
## Release Notes/Contributors/Etc **Optional**
If you aren't using changelog, put your release notes here (though you should
consider using changelog). You may also add any additional sections you feel are
necessary or important to include here. Please use the `## ` header.
require 'rubygems'
require 'puppetlabs_spec_helper/rake_tasks'
require 'puppet-lint/tasks/puppet-lint'
PuppetLint.configuration.send('disable_80chars')
PuppetLint.configuration.ignore_paths = ["spec/**/*.pp", "pkg/**/*.pp"]
desc "Validate manifests, templates, and ruby files"
task :validate do
Dir['manifests/**/*.pp'].each do |manifest|
sh "puppet parser validate --noop #{manifest}"
end
Dir['spec/**/*.rb','lib/**/*.rb'].each do |ruby_file|
sh "ruby -c #{ruby_file}" unless ruby_file =~ /spec\/fixtures/
end
Dir['templates/**/*.erb'].each do |template|
sh "erb -P -x -T '-' #{template} | ruby -c"
end
end
# == Class: rails_api
#
# Install and bootstrap the rails api
#
class rails_api {
require nilmdb
$deps = ['ruby-dev','libsqlite3-dev','nodejs']
package {$deps:
ensure => present
}
user{'rails':
ensure => present,
system => true
}
vcsrepo{'/opt/api':
ensure => latest,
provider => git,
source => 'https://git.wattsworth.net/wattsworth/control-api.git',
require => User['rails'],
notify => Exec['bundle_install','db_migrate','local_bootstrap'],
owner => 'rails'
}
package {'bundler':
ensure => present,
provider => gem,
require => Package[$deps]
}
#install the gems
exec {'bundle_install':
command => 'bundle install --without test development --path vendor/bundle',
cwd => '/opt/api',
environment => ['RAILS_ENV=local'],
path => ['/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'],
refreshonly => true,
user => 'rails',
require => [Vcsrepo['/opt/api'],Package['bundler']]
}
exec {'db_setup':
command => 'bundle exec rake db:setup',
cwd => '/opt/api',
environment => ['RAILS_ENV=local'],
path => ['/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'],
creates => '/opt/api/db/local.sqlite',
user => 'rails',
require => [Vcsrepo['/opt/api'],Package['bundler'],Exec['bundle_install']]
}
#migrate the database
exec {'db_migrate':
command => 'bundle exec rake db:migrate',
cwd => '/opt/api',
environment => ['RAILS_ENV=local'],
path => ['/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'],
user => 'rails',
refreshonly => true,
require => [Vcsrepo['/opt/api'],Package['bundler'],Exec['db_setup']]
}
#bootstrap the local environment
exec {'local_bootstrap':
command => 'bundle exec rake local:bootstrap',
cwd => '/opt/api',
environment => ['RAILS_ENV=local'],
path => ['/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'],
user => 'rails',
refreshonly => true,
require => [Vcsrepo['/opt/api'],Package['bundler'],Exec['db_migrate']]
}
}
{
"name": "jdonnal-railsapi",
"version": "0.1.0",
"author": "John Donnal",
"summary": null,
"license": "Apache 2.0",
"source": "",
"project_page": null,
"issues_url": null,
"dependencies": [
{"name":"puppetlabs-stdlib","version_requirement":">= 1.0.0"}
]
}
require 'spec_helper'
describe 'railsapi' do
context 'with defaults for all parameters' do
it { should contain_class('railsapi') }
end
end
require 'puppetlabs_spec_helper/module_spec_helper'
# The baseline for module testing used by Puppet Labs is that each manifest
# should have a corresponding test manifest that declares that class or defined
# type.
#
# Tests are then run by using puppet apply --noop (to check for compilation
# errors and view a log of events) or by fully applying the test in a virtual
# environment (to compare the resulting system state to the desired state).
#
# Learn more about module testing here:
# http://docs.puppetlabs.com/guides/tests_smoke.html
#
include railsapi
<VirtualHost *:80>
ServerName wattsworth.local
DocumentRoot /var/www/frontend
Alias /api /opt/lumen/public
<Directory /opt/lumen/public>
Alias /api /opt/api/public
<Directory /opt/api/public>
Options +Indexes +FollowSymLinks -MultiViews
AllowOverride None
Order allow,deny
@@ -11,7 +11,8 @@
</Directory>
<Location /api>
PassengerBaseURI /api
PassengerAppRoot /opt/lumen
PassengerAppRoot /opt/api
RailsEnv local
</Location>
<Directory /var/www/frontend>
Options Indexes FollowSymLinks MultiViews
......
# Change Log
All notable changes to this project will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org/).
## Supported Release [2.0.0]
### Summary
This is a major release that **drops support for Puppet 3** and addresses an issue with the subversion provider.
### Added
- Documentation for using a non-standard ssh port ([MODULES-1910](https://tickets.puppet.com/browse/MODULES-1910))
- Autorequire for the subversion package in the vcsrepo type ([MODULES-4722](https://tickets.puppetlabs.com/browse/MODULES-4722))
- Puppet 5 support in metadata ([MODULES-5144](https://tickets.puppet.com/browse/MODULES-5144))
### Changed
- Lower bound of Puppet requirement to 4.7.0 ([MODULES-4823](https://tickets.puppetlabs.com/browse/MODULES-4823))
### Fixed
- Solaris `SSH_AUTH_SOCKET` issue
- Issue with subversion provider ([MODULES-4280](https://tickets.puppetlabs.com/browse/MODULES-4280))
- `force` parameter to return a boolean instead of a string ([MODULES-4864](https://tickets.puppetlabs.com/browse/MODULES-4864))
## [1.5.0] - 2016-12-19 Supported Release
### Summary
Release featuring some refactoring and improvements around git's `ensurable`.
### Bugfixes
- `ensure => absent` fix
### Features
- `:source` property added
- Improved `ensure` handling for git provider
- General refactoring for all providers
- Various test improvements
## [1.4.0] - 2015-09-06 Supported Release
### Summary
Small release for a new feature and added compatibility.
### Features
- Git repositories can be cloned as mirror or bare repos.
- Added STDERR to Puppet's output.
- Added Debian 8 and Ubuntu 16.04 compatibility.
## [1.3.2] - 2015-12-08 Supported Release
### Summary
Small release for support of newer PE versions. This increments the version of PE in the metadata.json file.
## [1.3.1] - 2015-07-28 Supported Release
### Summary
This release includes a number of bugfixes and test updates.
### Fixed
- Fix for detached HEAD on git 2.4+.
- Git provider doesn't ignore revision property when depth is used (MODULES-2131).
- Tests fixed.
- Check if submodules == true before calling update_submodules.
## [1.3.0] - 2015-05-19 Supported Release
### Summary
This release adds git provider remote handling, svn conflict resolution, and fixes the git provider when /tmp is mounted noexec.
### Added
- `source` property now takes a hash of sources for the git provider's remotes.
- Added `submodules` parameter to skip submodule initialization for the git provider.
- Added `conflict` to the svn provider to resolve conflicts.
- Added `branch` parameter to specify clone branch.
- Readme rewritten.
### Fixed
- The git provider now works even if `/tmp` is noexec.
## [1.2.0] - 2014-11-04 Supported Release
### Summary
This release includes some improvements for git, mercurial, and cvs providers, and fixes the bug where there were warnings about multiple default providers.
### Added
- Update git and mercurial providers to set UID with `Puppet::Util::Execution.execute` instead of `su`
- Allow git excludes to be string or array
- Add `user` feature to cvs provider
### Fixed
- No more warnings about multiple default providers! (MODULES-428)
## [1.1.0] - 2014-07-14 Supported Release
### Summary
This release adds a Perforce provider\* and corrects the git provider behavior
when using `ensure => latest`.
\*(Only git provider is currently supported.)
### Added
- New Perforce provider.
### Fixed
- Fix behavior with `ensure => latest` and detached HEAD. (MODULES-660)
- Spec test fixes.
## [1.0.2] - 2014-06-30 Supported Release
### Summary
This supported release adds SLES 11 to the list of compatible OSs and
documentation updates for support.
## [1.0.1] - 2014-06-17 Supported Release
### Summary
This release is the first supported release of vcsrepo. The readme has been
greatly improved.
### Added
- Updated and expanded readme to follow readme template.
### Fixed
- Remove SLES from compatibility metadata.
- Unpin rspec development dependencies.
- Update acceptance level testing.
## [1.0.0] - 2014-06-04
### Summary
This release focuses on a number of bugfixes, and also has some
new features for Bzr and Git.
### Added
- Bzr:
- Call set_ownership.
- Git:
- Add ability for shallow clones.
- Use -a and desired for HARD resets.
- Use rev-parse to get tag canonical revision.
### Fixed
- HG:
- Only add ssh options when it's talking to the network.
- Git:
- Fix for issue with detached HEAD.
- `force => true` will now destroy and recreate repo.
- Actually use the remote parameter.
- Use origin/master instead of origin/HEAD when on master.
- SVN:
- Fix svnlook behavior with plain directories.
## 0.2.0 - 2013-11-13
### Summary
This release mainly focuses on a number of bugfixes, which should
significantly improve the reliability of Git and SVN. Thanks to
our many contributors for all of these fixes!
### Added
- Git:
- Add autorequire for `Package['git']`.
- HG:
- Allow user and identity properties.
- Bzr:
- "ensure => latest" support.
- SVN:
- Added configuration parameter.
- Add support for master svn repositories.
- CVS:
- Allow for setting the CVS_RSH environment variable.
### Fixed
- Handle Puppet::Util[::Execution].withenv for 2.x and 3.x properly.
- Change path_empty? to not do full directory listing.
- Overhaul spec tests to work with rspec2.
- Git:
- Improve Git SSH usage documentation.
- Add ssh session timeouts to prevent network issues from blocking runs.
- Fix git provider checkout of a remote ref on an existing repo.
- Allow unlimited submodules (thanks to --recursive).
- Use git checkout --force instead of short -f everywhere.
- Update git provider to handle checking out into an existing (empty) dir.
- SVN:
- Handle force property.
- Adds support for changing upstream repo url.
- Check that the URL of the WC matches the URL from the manifest.
- Changed from using "update" to "switch".
- Handle revision update without source switch.
- Fix svn provider to look for '^Revision:' instead of '^Last Changed Rev:'.
- CVS:
- Documented the "module" attribute.
[2.0.0]: https://github.com/puppetlabs/puppetlabs-vcsrepo/compare/1.5.0...2.0.0
[1.5.0]: https://github.com/puppetlabs/puppetlabs-vcsrepo/compare/1.4.0...1.5.0
[1.4.0]: https://github.com/puppetlabs/puppetlabs-vcsrepo/compare/1.3.2...1.4.0
[1.3.2]: https://github.com/puppetlabs/puppetlabs-vcsrepo/compare/1.3.1...1.3.2
[1.3.1]: https://github.com/puppetlabs/puppetlabs-vcsrepo/compare/1.3.0...1.3.1
[1.3.0]: https://github.com/puppetlabs/puppetlabs-vcsrepo/compare/1.2.0...1.3.0
[1.2.0]: https://github.com/puppetlabs/puppetlabs-vcsrepo/compare/1.1.0...1.2.0
[1.1.0]: https://github.com/puppetlabs/puppetlabs-vcsrepo/compare/1.0.2...1.1.0
[1.0.2]: https://github.com/puppetlabs/puppetlabs-vcsrepo/compare/1.0.1...1.0.2
[1.0.1]: https://github.com/puppetlabs/puppetlabs-vcsrepo/compare/1.0.0...1.0.1
[1.0.0]: https://github.com/puppetlabs/puppetlabs-vcsrepo/compare/0.2.0...1.0.0
Checklist (and a short version for the impatient)
=================================================
* Commits:
- Make commits of logical units.
- Check for unnecessary whitespace with "git diff --check" before
committing.
- Commit using Unix line endings (check the settings around "crlf" in
git-config(1)).
- Do not check in commented out code or unneeded files.
- The first line of the commit message should be a short
description (50 characters is the soft limit, excluding ticket
number(s)), and should skip the full stop.
- Associate the issue in the message. The first line should include
the issue number in the form "(#XXXX) Rest of message".
- The body should provide a meaningful commit message, which:
- uses the imperative, present tense: "change", not "changed" or
"changes".
- includes motivation for the change, and contrasts its
implementation with the previous behavior.
- Make sure that you have tests for the bug you are fixing, or
feature you are adding.
- Make sure the test suite passes after your commit:
`bundle exec rspec spec/acceptance`. More information on [testing](#Testing) is below.
- When introducing a new feature, make sure it is properly
documented in the README.md
* Submission:
* Pre-requisites:
- Make sure you have a [GitHub account](https://github.com/join)
- [Create a ticket](https://tickets.puppet.com/secure/CreateIssue!default.jspa), or [watch the ticket](https://tickets.puppet.com/browse/) you are patching for.
* Preferred method:
- Fork the repository on GitHub.
- Push your changes to a topic branch in your fork of the
repository. (the format ticket/1234-short_description_of_change is
usually preferred for this project).
- Submit a pull request to the repository in the puppetlabs
organization.
The long version
================
1. Make separate commits for logically separate changes.
Please break your commits down into logically consistent units
which include new or changed tests relevant to the rest of the
change. The goal of doing this is to make the diff easier to
read for whoever is reviewing your code. In general, the easier
your diff is to read, the more likely someone will be happy to
review it and get it into the code base.
If you are going to refactor a piece of code, please do so as a
separate commit from your feature or bug fix changes.
We also really appreciate changes that include tests to make
sure the bug is not re-introduced, and that the feature is not
accidentally broken.
Describe the technical detail of the change(s). If your
description starts to get too long, that is a good sign that you
probably need to split up your commit into more finely grained
pieces.
Commits which plainly describe the things which help
reviewers check the patch and future developers understand the
code are much more likely to be merged in with a minimum of
bike-shedding or requested changes. Ideally, the commit message
would include information, and be in a form suitable for
inclusion in the release notes for the version of Puppet that
includes them.
Please also check that you are not introducing any trailing
whitespace or other "whitespace errors". You can do this by
running "git diff --check" on your changes before you commit.
2. Sending your patches
To submit your changes via a GitHub pull request, we _highly_
recommend that you have them on a topic branch, instead of
directly on "master".
It makes things much easier to keep track of, especially if
you decide to work on another thing before your first change
is merged in.
GitHub has some pretty good
[general documentation](http://help.github.com/) on using
their site. They also have documentation on
[creating pull requests](http://help.github.com/send-pull-requests/).
In general, after pushing your topic branch up to your
repository on GitHub, you can switch to the branch in the
GitHub UI and click "Pull Request" towards the top of the page
in order to open a pull request.
3. Update the related GitHub issue.
If there is a GitHub issue associated with the change you
submitted, then you should update the ticket to include the
location of your branch, along with any other commentary you
may wish to make.
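Putting the steps above together, a minimal command-line sketch of the workflow might look like the following (the ticket number and branch name are illustrative, reusing the ticket/1234-short_description_of_change format mentioned earlier):

```shell
# check for whitespace errors before committing
% git diff --check
# work on a topic branch named after the ticket
% git checkout -b ticket/1234-short_description_of_change
% git commit
# push the branch to your fork, then open a pull request on GitHub
% git push origin ticket/1234-short_description_of_change
```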
Testing
=======
Getting Started
---------------
Our puppet modules provide [`Gemfile`](./Gemfile)s which can tell a ruby
package manager such as [bundler](http://bundler.io/) what Ruby packages,
or Gems, are required to build, develop, and test this software.
Please make sure you have [bundler installed](http://bundler.io/#getting-started)
on your system, then use it to install all dependencies needed for this project,
by running
```shell
% bundle install
Fetching gem metadata from https://rubygems.org/........
Fetching gem metadata from https://rubygems.org/..
Using rake (10.1.0)
Using builder (3.2.2)
-- 8><-- many more --><8 --
Using rspec-system-puppet (2.2.0)
Using serverspec (0.6.3)
Using rspec-system-serverspec (1.0.0)
Using bundler (1.3.5)
Your bundle is complete!
Use `bundle show [gemname]` to see where a bundled gem is installed.
```
NOTE: some systems may require you to run this command with sudo.
If you already have those gems installed, make sure they are up-to-date:
```shell
% bundle update
```
With all dependencies in place and up-to-date we can now run the tests:
```shell
% bundle exec rake spec
```
This will execute all of the [rspec tests](http://rspec-puppet.com/)
under [spec/defines](./spec/defines), [spec/classes](./spec/classes),
and so on. rspec tests may have the same kind of dependencies as the
module they are testing. While the module defines its dependencies in its
[Modulefile](./Modulefile), rspec tests define them in [.fixtures.yml](./.fixtures.yml).
Some puppet modules also come with [beaker](https://github.com/puppetlabs/beaker)
tests. These tests spin up a virtual machine under
[VirtualBox](https://www.virtualbox.org/), controlling it with
[Vagrant](http://www.vagrantup.com/) to actually simulate scripted test
scenarios. In order to run these, you will need both of those tools
installed on your system.
You can run them by issuing the following commands:
```shell
% bundle exec rake spec_clean
% bundle exec rspec spec/acceptance
```
This will now download a pre-fabricated image configured in the [default node-set](./spec/acceptance/nodesets/default.yml),
install puppet, copy this module and install its dependencies per [spec/spec_helper_acceptance.rb](./spec/spec_helper_acceptance.rb)
and then run all the tests under [spec/acceptance](./spec/acceptance).
Writing Tests
-------------
XXX getting started writing tests.
If you have commit access to the repository
===========================================
Even if you have commit access to the repository, you will still need to
go through the process above, and have someone else review and merge
in your changes. The rule is that all changes must be reviewed by a
developer on the project (that did not write the code) to ensure that
all changes go through a code review process.
Having someone other than the author of the topic branch recorded as
performing the merge is the record that they performed the code
review.
Additional Resources
====================
* [Getting additional help](http://puppet.com/community/get-help)
* [Writing tests](https://docs.puppet.com/guides/module_guides/bgtm.html#step-three-module-testing)
* [General GitHub documentation](http://help.github.com/)
* [GitHub pull request documentation](http://help.github.com/send-pull-requests/)
#This file is generated by ModuleSync, do not edit.
source ENV['GEM_SOURCE'] || "https://rubygems.org"
# Determines what type of gem is requested based on place_or_version.
def gem_type(place_or_version)
if place_or_version =~ /^git:/
:git
elsif place_or_version =~ /^file:/
:file
else
:gem
end
end
# Find a location or specific version for a gem. place_or_version can be a
# version, which is most often used. It can also be git, which is specified as
# `git://somewhere.git#branch`. You can also use a file source location, which
# is specified as `file://some/location/on/disk`.
def location_for(place_or_version, fake_version = nil)
if place_or_version =~ /^(git[:@][^#]*)#(.*)/
[fake_version, { :git => $1, :branch => $2, :require => false }].compact
elsif place_or_version =~ /^file:\/\/(.*)/
['>= 0', { :path => File.expand_path($1), :require => false }]
else
[place_or_version, { :require => false }]
end
end
# Used for gem conditionals
supports_windows = false
ruby_version_segments = Gem::Version.new(RUBY_VERSION.dup).segments
minor_version = "#{ruby_version_segments[0]}.#{ruby_version_segments[1]}"
group :development do
gem "puppet-module-posix-default-r#{minor_version}", :require => false, :platforms => "ruby"
gem "puppet-module-win-default-r#{minor_version}", :require => false, :platforms => ["mswin", "mingw", "x64_mingw"]
gem "puppet-module-posix-dev-r#{minor_version}", :require => false, :platforms => "ruby"
gem "puppet-module-win-dev-r#{minor_version}", :require => false, :platforms => ["mswin", "mingw", "x64_mingw"]
gem "json_pure", '<= 2.0.1', :require => false if Gem::Version.new(RUBY_VERSION.dup) < Gem::Version.new('2.0.0')
gem "fast_gettext", '1.1.0', :require => false if Gem::Version.new(RUBY_VERSION.dup) < Gem::Version.new('2.1.0')
gem "fast_gettext", :require => false if Gem::Version.new(RUBY_VERSION.dup) >= Gem::Version.new('2.1.0')
end
group :system_tests do
gem "puppet-module-posix-system-r#{minor_version}", :require => false, :platforms => "ruby"
gem "puppet-module-win-system-r#{minor_version}", :require => false, :platforms => ["mswin", "mingw", "x64_mingw"]
gem "beaker", *location_for(ENV['BEAKER_VERSION'] || '>= 3')
gem "beaker-pe", :require => false
gem "beaker-rspec", *location_for(ENV['BEAKER_RSPEC_VERSION'])
gem "beaker-hostgenerator", *location_for(ENV['BEAKER_HOSTGENERATOR_VERSION'])
gem "beaker-abs", *location_for(ENV['BEAKER_ABS_VERSION'] || '~> 0.1')
end
gem 'puppet', *location_for(ENV['PUPPET_GEM_VERSION'])
# Only explicitly specify Facter/Hiera if a version has been specified.
# Otherwise it can lead to strange bundler behavior. If you are seeing weird
# gem resolution behavior, try setting `DEBUG_RESOLVER` environment variable
# to `1` and then run bundle install.
gem 'facter', *location_for(ENV['FACTER_GEM_VERSION']) if ENV['FACTER_GEM_VERSION']
gem 'hiera', *location_for(ENV['HIERA_GEM_VERSION']) if ENV['HIERA_GEM_VERSION']
# Evaluate Gemfile.local if it exists
if File.exists? "#{__FILE__}.local"
eval(File.read("#{__FILE__}.local"), binding)
end
# Evaluate ~/.gemfile if it exists
if File.exists?(File.join(Dir.home, '.gemfile'))
eval(File.read(File.join(Dir.home, '.gemfile')), binding)
end
# vim:ft=ruby
## Maintenance
Maintainers:
- Puppet Forge Modules Team `forge-modules |at| puppet |dot| com`
Tickets: https://tickets.puppet.com/browse/MODULES. Make sure to set component to `vcsrepo`.
Puppet Module - puppetlabs-vcsrepo
Copyright 2017 Puppet, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
require 'puppetlabs_spec_helper/rake_tasks'
require 'puppet-lint/tasks/puppet-lint'
require 'puppet_blacksmith/rake_tasks' if Bundler.rubygems.find_name('puppet-blacksmith').any?
PuppetLint.configuration.fail_on_warnings = true
PuppetLint.configuration.send('relative')
desc 'Generate pooler nodesets'
task :gen_nodeset do
require 'beaker-hostgenerator'
require 'securerandom'
require 'fileutils'
agent_target = ENV['TEST_TARGET']
if ! agent_target
STDERR.puts 'TEST_TARGET environment variable is not set'
STDERR.puts 'setting to default value of "redhat-64default."'
agent_target = 'redhat-64default.'
end
master_target = ENV['MASTER_TEST_TARGET']
if ! master_target
STDERR.puts 'MASTER_TEST_TARGET environment variable is not set'
STDERR.puts 'setting to default value of "redhat7-64mdcl"'
master_target = 'redhat7-64mdcl'
end
targets = "#{master_target}-#{agent_target}"
cli = BeakerHostGenerator::CLI.new([targets])
nodeset_dir = "tmp/nodesets"
nodeset = "#{nodeset_dir}/#{targets}-#{SecureRandom.uuid}.yaml"
FileUtils.mkdir_p(nodeset_dir)
File.open(nodeset, 'w') do |fh|
fh.print(cli.execute)
end
puts nodeset
end
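# Example invocation (illustrative): use the defaults above, or override the targets, e.g.
#   MASTER_TEST_TARGET=redhat7-64mdcl TEST_TARGET=redhat-64default. bundle exec rake gen_nodeset
# The path of the generated nodeset YAML is printed to stdout.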
vcsrepo { '/tmp/vcstest-bzr-branch':
ensure => present,
provider => bzr,
source => 'lp:do',
revision => '1312',
}
vcsrepo { '/tmp/vcstest-bzr-init':
ensure => present,
provider => bzr,
}
vcsrepo { '/tmp/vcstest-cvs-repo':
ensure => present,
provider => cvs,
}
vcsrepo { '/tmp/vcstest-cvs-workspace-local':
ensure => present,
provider => cvs,
source => '/tmp/vcstest-cvs-repo',
require => Vcsrepo['/tmp/vcstest-cvs-repo'],
}
vcsrepo { '/tmp/vcstest-cvs-workspace-remote':
ensure => present,
provider => cvs,
source => ':pserver:anonymous@cvs.sv.gnu.org:/sources/leetcvrt',
}
vcsrepo { '/tmp/vcstest-git-bare':
ensure => bare,
provider => git,
}
vcsrepo { '/tmp/vcstest-git-clone':
ensure => present,
provider => git,
source => 'git://github.com/bruce/rtex.git',
}
vcsrepo { '/tmp/git':
ensure => 'present',
provider => 'git',
source => 'https://github.com/git/git.git',
branch => 'v2.2.0',
depth => 1,
}
vcsrepo { '/tmp/vcstest-git-wc':
ensure => present,
provider => git,
}
vcsrepo { '/tmp/vcstest-hg-clone':
ensure => present,
provider => hg,
source => 'http://hg.basho.com/riak',
revision => 'riak-0.5.3',
}
vcsrepo { '/path/to/repo':
ensure => latest,
provider => 'hg',
source => 'http://hg.example.com/myrepo',
basic_auth_username => 'hgusername',
basic_auth_password => 'hgpassword',
}
vcsrepo { '/tmp/vcstest-hg-init':
ensure => present,
provider => hg,
}
vcsrepo { '/tmp/vcstest/p4_client_root':
ensure => present,
provider => 'p4',
}
vcsrepo { '/tmp/vcstest/p4_client_root':
ensure => absent,
provider => 'p4',
}
vcsrepo { '/tmp/vcstest/p4_client_root':
ensure => latest,
provider => 'p4',
source => '//depot/...',
}
vcsrepo { '/tmp/vcstest/p4_client_root':
ensure => present,
provider => 'p4',
source => '//depot/...',
revision => '30',
}
vcsrepo { '/tmp/vcstest-svn-checkout':
ensure => present,
provider => svn,
source => 'http://svn.edgewall.org/repos/babel/trunk',
}
vcsrepo { '/tmp/vcstest-svn-server':
ensure => present,
provider => svn,
}
Facter.add(:vcsrepo_svn_ver) do
setcode do
begin
version = Facter::Core::Execution.execute('svn --version --quiet')
if Gem::Version.new(version) > Gem::Version.new('0.0.1')
version
else
''
end
rescue
''
end
end
end
require 'tmpdir'
require 'digest/md5'
require 'fileutils'
# Abstract
class Puppet::Provider::Vcsrepo < Puppet::Provider
def check_force
if path_exists? and not path_empty?
if @resource.value(:force)
notice "Removing %s to replace with desired repository." % @resource.value(:path)
destroy
else
raise Puppet::Error, "Path %s exists and is not the desired repository." % @resource.value(:path)
end
end
end
private
def set_ownership
owner = @resource.value(:owner) || nil
group = @resource.value(:group) || nil
FileUtils.chown_R(owner, group, @resource.value(:path))
end
def path_exists?
File.directory?(@resource.value(:path))
end
def path_empty?
# Path is empty if the only entries are '.' and '..'
d = Dir.new(@resource.value(:path))
d.read # should return '.'
d.read # should return '..'
d.read.nil?
end
# Note: We don't rely on Dir.chdir's behavior of automatically returning the
# value of the last statement -- for easier stubbing.
def at_path(&block) #:nodoc:
value = nil
Dir.chdir(@resource.value(:path)) do
value = yield
end
value
end
def tempdir
@tempdir ||= File.join(Dir.tmpdir, 'vcsrepo-' + Digest::MD5.hexdigest(@resource.value(:path)))
end
end
require File.join(File.dirname(__FILE__), '..', 'vcsrepo')
Puppet::Type.type(:vcsrepo).provide(:bzr, :parent => Puppet::Provider::Vcsrepo) do
desc "Supports Bazaar repositories"
commands :bzr => 'bzr'
has_features :reference_tracking
def create
check_force
if !@resource.value(:source)
create_repository(@resource.value(:path))
else
clone_repository(@resource.value(:revision))
end
end
def working_copy_exists?
return false if not File.directory?(@resource.value(:path))
begin
bzr('status', @resource.value(:path))
return true
rescue Puppet::ExecutionFailure
return false
end
end
def exists?
working_copy_exists?
end
def destroy
FileUtils.rm_rf(@resource.value(:path))
end
def revision
at_path do
current_revid = bzr('version-info')[/^revision-id:\s+(\S+)/, 1]
desired = @resource.value(:revision)
begin
desired_revid = bzr('revision-info', desired).strip.split(/\s+/).last
rescue Puppet::ExecutionFailure
# Possible revid available during update (but definitely not current)
desired_revid = nil
end
if current_revid == desired_revid
desired
else
current_revid
end
end
end
def revision=(desired)
at_path do
begin
bzr('update', '-r', desired)
rescue Puppet::ExecutionFailure
bzr('update', '-r', desired, ':parent')
end
end
update_owner
end
def source
at_path do
bzr('info')[/^\s+parent branch:\s+(\S+?)$/m, 1]
end
end
def source=(desired)
create # recreate
end
def latest
at_path do
bzr('version-info', ':parent')[/^revision-id:\s+(\S+)/, 1]
end
end
def latest?
at_path do
return self.revision == self.latest
end
end
private
def create_repository(path)
bzr('init', path)
update_owner
end
def clone_repository(revision)
args = ['branch']
if revision
args.push('-r', revision)
end
args.push(@resource.value(:source),
@resource.value(:path))
bzr(*args)
update_owner
end
def update_owner
if @resource.value(:owner) or @resource.value(:group)
set_ownership
end
end
end
require File.join(File.dirname(__FILE__), '..', 'vcsrepo')
Puppet::Type.type(:vcsrepo).provide(:cvs, :parent => Puppet::Provider::Vcsrepo) do
desc "Supports CVS repositories/workspaces"
commands :cvs => 'cvs'
has_features :gzip_compression, :reference_tracking, :modules, :cvs_rsh, :user
def create
check_force
if !@resource.value(:source)
create_repository(@resource.value(:path))
else
checkout_repository
end
update_owner
end
def exists?
working_copy_exists?
end
def working_copy_exists?
if @resource.value(:source)
directory = File.join(@resource.value(:path), 'CVS')
return false if not File.directory?(directory)
begin
at_path { runcvs('-nqd', @resource.value(:path), 'status', '-l') }
return true
rescue Puppet::ExecutionFailure
return false
end
else
directory = File.join(@resource.value(:path), 'CVSROOT')
return false if not File.directory?(directory)
config = File.join(@resource.value(:path), 'CVSROOT', 'config,v')
return false if not File.exists?(config)
return true
end
end
def destroy
FileUtils.rm_rf(@resource.value(:path))
end
def latest?
Puppet.debug "Checking for updates because 'ensure => latest'"
at_path do
# We cannot use -P to prune empty dirs, otherwise
# CVS would report those as "missing", regardless
# of whether they have contents or updates.
is_current = (runcvs('-nq', 'update', '-d').strip == "")
if (!is_current) then Puppet.debug "There are updates available on the checkout's current branch/tag." end
return is_current
end
end
def latest
# CVS does not have a concept like commit-IDs or change
# sets, so we can only have the current branch name (or the
# requested one, if that differs) as the "latest" revision.
should = @resource.value(:revision)
current = self.revision
return should != current ? should : current
end
def revision
if !@rev
if File.exist?(tag_file)
contents = File.read(tag_file).strip
# Note: Doesn't differentiate between N and T entries
@rev = contents[1..-1]
else
@rev = 'HEAD'
end
Puppet.debug "Checkout is on branch/tag '#{@rev}'"
end
return @rev
end
def revision=(desired)
at_path do
runcvs('update', '-dr', desired, '.')
update_owner
@rev = desired
end
end
def source
File.read(File.join(@resource.value(:path), 'CVS', 'Root')).chomp
end
def source=(desired)
create # recreate
end
def module
File.read(File.join(@resource.value(:path), 'CVS', 'Repository')).chomp
end
def module=(desired)
create # recreate
end
private
def tag_file
File.join(@resource.value(:path), 'CVS', 'Tag')
end
def checkout_repository
dirname, basename = File.split(@resource.value(:path))
Dir.chdir(dirname) do
args = ['-d', @resource.value(:source)]
if @resource.value(:compression)
args.push('-z', @resource.value(:compression))
end
args.push('checkout')
if @resource.value(:revision)
args.push('-r', @resource.value(:revision))
end
args.push('-d', basename, module_name)
runcvs(*args)
end
end
# If no module is provided, use '.', the root of the repo
def module_name
@resource.value(:module) or '.'
end
def create_repository(path)
runcvs('-d', path, 'init')
end
def update_owner
if @resource.value(:owner) or @resource.value(:group)
set_ownership
end
end
def runcvs(*args)
if @resource.value(:cvs_rsh)
Puppet.debug "Using CVS_RSH = " + @resource.value(:cvs_rsh)
e = { :CVS_RSH => @resource.value(:cvs_rsh) }
else
e = {}
end
if @resource.value(:user) and @resource.value(:user) != Facter['id'].value
Puppet.debug "Running as user " + @resource.value(:user)
Puppet::Util::Execution.execute([:cvs, *args], :uid => @resource.value(:user), :custom_environment => e, :combine => true, :failonfail => true)
else
Puppet::Util::Execution.execute([:cvs, *args], :custom_environment => e, :combine => true, :failonfail => true)
end
end
end
require File.join(File.dirname(__FILE__), '..', 'vcsrepo')
Puppet::Type.type(:vcsrepo).provide(:dummy, :parent => Puppet::Provider::Vcsrepo) do
desc "Dummy default provider"
defaultfor :feature => :posix
defaultfor :operatingsystem => :windows
def working_copy_exists?
providers = @resource.class.providers.map{|x| x.to_s}.sort.reject{|x| x == "dummy"}.join(", ") rescue "none"
raise("vcsrepo resource must have a provider, available: #{providers}")
end
end
require File.join(File.dirname(__FILE__), '..', 'vcsrepo')
Puppet::Type.type(:vcsrepo).provide(:hg, :parent => Puppet::Provider::Vcsrepo) do
desc "Supports Mercurial repositories"
commands :hg => 'hg'
has_features :reference_tracking, :ssh_identity, :user, :basic_auth
def create
check_force
if !@resource.value(:source)
create_repository(@resource.value(:path))
else
clone_repository(@resource.value(:revision))
end
update_owner
end
def working_copy_exists?
return false if not File.directory?(@resource.value(:path))
begin
hg_wrapper('status', @resource.value(:path))
return true
rescue Puppet::ExecutionFailure
return false
end
end
def exists?
working_copy_exists?
end
def destroy
FileUtils.rm_rf(@resource.value(:path))
end
def latest?
at_path do
return self.revision == self.latest
end
end
def latest
at_path do
begin
hg_wrapper('incoming', '--branch', '.', '--newest-first', '--limit', '1', { :remote => true })[/^changeset:\s+(?:-?\d+):(\S+)/m, 1]
rescue Puppet::ExecutionFailure
# If there are no new changesets, return the current nodeid
self.revision
end
end
end
def revision
at_path do
current = hg_wrapper('parents')[/^changeset:\s+(?:-?\d+):(\S+)/m, 1]
desired = @resource.value(:revision)
if desired
# Return the tag name if it maps to the current nodeid
mapped = hg_wrapper('tags')[/^#{Regexp.quote(desired)}\s+\d+:(\S+)/m, 1]
if current == mapped
desired
else
current
end
else
current
end
end
end
def revision=(desired)
at_path do
begin
hg_wrapper('pull', { :remote => true })
rescue
end
begin
hg_wrapper('merge')
rescue Puppet::ExecutionFailure
# If there's nothing to merge, just skip
end
hg_wrapper('update', '--clean', '-r', desired)
end
update_owner
end
def source
at_path do
hg_wrapper('paths')[/^default = (.*)/, 1]
end
end
def source=(desired)
create # recreate
end
private
def create_repository(path)
hg_wrapper('init', path)
end
def clone_repository(revision)
args = ['clone']
if revision
args.push('-u', revision)
end
args.push(@resource.value(:source),
@resource.value(:path))
args.push({ :remote => true })
hg_wrapper(*args)
end
def update_owner
if @resource.value(:owner) or @resource.value(:group)
set_ownership
end
end
def hg_wrapper(*args)
options = { :remote => false }
if args.length > 0 and args[-1].is_a? Hash
options.merge!(args.pop)
end
if @resource.value(:basic_auth_username) && @resource.value(:basic_auth_password)
args += [
"--config", "\"auth.x.prefix=#{@resource.value(:source)}\"",
"--config", "\"auth.x.username=#{@resource.value(:basic_auth_username)}\"",
"--config", "\"auth.x.password=#{@resource.value(:basic_auth_password)}\"",
"--config", "\"auth.x.schemes=http https\""
]
end
if options[:remote] and @resource.value(:identity)
args += ["--ssh", "ssh -oStrictHostKeyChecking=no -oPasswordAuthentication=no -oKbdInteractiveAuthentication=no -oChallengeResponseAuthentication=no -i #{@resource.value(:identity)}"]
end
if @resource.value(:user) and @resource.value(:user) != Facter['id'].value
args.map! { |a| if a =~ /\s/ then "'#{a}'" else a end } # Adds quotes to arguments with whitespaces.
Puppet::Util::Execution.execute("hg #{args.join(' ')}", :uid => @resource.value(:user), :failonfail => true, :combine => true)
else
hg(*args)
end
end
end
require File.join(File.dirname(__FILE__), '..', 'vcsrepo')
Puppet::Type.type(:vcsrepo).provide(:p4, :parent => Puppet::Provider::Vcsrepo) do
desc "Supports Perforce depots"
has_features :filesystem_types, :reference_tracking, :p4config
def create
check_force
# create or update client
create_client(client_name)
# if source provided, sync client
source = @resource.value(:source)
if source
revision = @resource.value(:revision)
sync_client(source, revision)
end
update_owner
end
def working_copy_exists?
# Check if the server is there, or raise error
p4(['info'], {:marshal => false})
# Check if workspace is setup
args = ['where']
args.push(@resource.value(:path) + "/...")
hash = p4(args, {:raise => false})
return (hash['code'] != "error")
end
def exists?
working_copy_exists?
end
def destroy
args = ['client']
args.push('-d', '-f')
args.push(client_name)
p4(args)
FileUtils.rm_rf(@resource.value(:path))
end
def latest?
rev = self.revision
if rev
(rev >= self.latest)
else
true
end
end
def latest
args = ['changes']
args.push('-m1', @resource.value(:source))
hash = p4(args)
return hash['change'].to_i
end
def revision
args = ['cstat']
args.push(@resource.value(:source))
hash = p4(args, {:marshal => false})
hash = marshal_cstat(hash)
revision = 0
if hash && hash['code'] != 'error'
hash['data'].each do |c|
if c['status'] == 'have'
change = c['change'].to_i
revision = change if change > revision
end
end
end
return revision
end
def revision=(desired)
sync_client(@resource.value(:source), desired)
update_owner
end
def source
args = ['where']
args.push(@resource.value(:path) + "/...")
hash = p4(args, {:raise => false})
return hash['depotFile']
end
def source=(desired)
create # recreate
end
private
def update_owner
if @resource.value(:owner) or @resource.value(:group)
set_ownership
end
end
# Sync the client workspace files to head or specified revision.
# Params:
# +source+:: Depot path to sync
# +revision+:: Perforce change list to sync to (optional)
def sync_client(source, revision)
Puppet.debug "Syncing: #{source}"
args = ['sync']
if revision
args.push(source + "@#{revision}")
else
args.push(source)
end
p4(args)
end
# Returns the name of the Perforce client workspace
def client_name
p4config = @resource.value(:p4config)
# default (generated) client name
path = @resource.value(:path)
host = Facter.value('hostname')
default = "puppet-" + Digest::MD5.hexdigest(path + host)
# check config for client name
set_client = nil
if p4config && File.file?(p4config)
open(p4config) do |f|
m = f.grep(/^P4CLIENT=/).pop
p = /^P4CLIENT=(.*)$/
set_client = p.match(m)[1] if m
end
end
return set_client || ENV['P4CLIENT'] || default
end
# Create (or update) a client workspace spec.
# If a client name is not provided then a hash based on the path is used.
# Params:
# +client+:: Name of client workspace
# +path+:: The Root location of the Perforce client workspace
def create_client(client)
Puppet.debug "Creating client: #{client}"
# fetch client spec
hash = parse_client(client)
hash['Root'] = @resource.value(:path)
hash['Description'] = "Generated by Puppet VCSrepo"
# check if source is a Stream
source = @resource.value(:source)
if source
parts = source.split(/\//)
if parts && parts.length >= 4
source = "//" + parts[2] + "/" + parts[3]
streams = p4(['streams', source], {:raise => false})
if streams['code'] == "stat"
hash['Stream'] = streams['Stream']
notice "Streams" + streams['Stream'].inspect
end
end
end
# save client spec
save_client(hash)
end
# Fetches a client workspace spec from Perforce and returns a hash map representation.
# Params:
# +client+:: name of the client workspace
def parse_client(client)
args = ['client']
args.push('-o', client)
hash = p4(args)
return hash
end
# Saves the client workspace spec from the given hash
# Params:
# +hash+:: hash map of client spec
def save_client(hash)
spec = String.new
view = "\nView:\n"
hash.keys.sort.each do |k|
v = hash[k]
next if( k == "code" )
if(k.to_s =~ /View/ )
view += "\t#{v}\n"
else
spec += "#{k.to_s}: #{v.to_s}\n"
end
end
spec += view
args = ['client']
args.push('-i')
p4(args, {:input => spec, :marshal => false})
end
# Sets Perforce Configuration environment.
# P4CLIENT generated, but overwritten if defined in config.
def config
p4config = @resource.value(:p4config)
cfg = Hash.new
cfg.store 'P4CONFIG', p4config if p4config
cfg.store 'P4CLIENT', client_name
return cfg
end
def p4(args, options = {})
# Merge custom options with defaults
opts = {
:raise => true, # Raise errors
:marshal => true, # Marshal output
}.merge(options)
cmd = ['p4']
cmd.push '-R' if opts[:marshal]
cmd.push args
cmd_str = cmd.respond_to?(:join) ? cmd.join(' ') : cmd
Puppet.debug "environment: #{config}"
Puppet.debug "command: #{cmd_str}"
hash = Hash.new
Open3.popen3(config, cmd_str) do |i, o, e, t|
# Send input stream if provided
if(opts[:input])
Puppet.debug "input:\n" + opts[:input]
i.write opts[:input]
i.close
end
if(opts[:marshal])
hash = Marshal.load(o)
else
hash['data'] = o.read
end
# Raise errors, Perforce or Exec
if(opts[:raise] && !e.eof && t.value != 0)
raise Puppet::Error, "\nP4: #{e.read}"
end
if(opts[:raise] && hash['code'] == 'error' && t.value != 0)
raise Puppet::Error, "\nP4: #{hash['data']}"
end
end
Puppet.debug "hash: #{hash}\n"
return hash
end
# helper method as cstat does not Marshal
def marshal_cstat(hash)
data = hash['data']
code = 'error'
list = Array.new
change = Hash.new
data.each_line do |l|
p = /^\.\.\. (.*) (.*)$/
m = p.match(l)
if m
change[m[1]] = m[2]
if m[1] == 'status'
code = 'stat'
list.push change
change = Hash.new
end
end
end
hash = Hash.new
hash.store 'code', code
hash.store 'data', list
return hash
end
end
require File.join(File.dirname(__FILE__), '..', 'vcsrepo')
Puppet::Type.type(:vcsrepo).provide(:svn, :parent => Puppet::Provider::Vcsrepo) do
desc "Supports Subversion repositories"
commands :svn => 'svn',
:svnadmin => 'svnadmin',
:svnlook => 'svnlook'
has_features :filesystem_types, :reference_tracking, :basic_auth, :configuration, :conflict, :depth,
:include_paths
def create
check_force
if !@resource.value(:source)
if @resource.value(:includes)
raise Puppet::Error, "Specifying include paths on a nonexistent repo."
end
create_repository(@resource.value(:path))
else
checkout_repository(@resource.value(:source),
@resource.value(:path),
@resource.value(:revision),
@resource.value(:depth))
end
if @resource.value(:includes)
validate_version
update_includes(@resource.value(:includes))
end
update_owner
end
def working_copy_exists?
return false if not File.directory?(@resource.value(:path))
if @resource.value(:source)
begin
svn('status', @resource.value(:path))
return true
rescue Puppet::ExecutionFailure
return false
end
else
begin
svnlook('uuid', @resource.value(:path))
return true
rescue Puppet::ExecutionFailure
return false
end
end
end
def exists?
working_copy_exists?
end
def destroy
FileUtils.rm_rf(@resource.value(:path))
end
def latest?
at_path do
(self.revision >= self.latest) and (@resource.value(:source) == self.source)
end
end
def buildargs
args = ['--non-interactive']
if @resource.value(:basic_auth_username) && @resource.value(:basic_auth_password)
args.push('--username', @resource.value(:basic_auth_username))
args.push('--password', @resource.value(:basic_auth_password))
args.push('--no-auth-cache')
end
if @resource.value(:configuration)
args.push('--config-dir', @resource.value(:configuration))
end
if @resource.value(:trust_server_cert) != :false
args.push('--trust-server-cert')
end
args
end
def latest
args = buildargs.push('info', '-r', 'HEAD')
at_path do
svn(*args)[/^Revision:\s+(\d+)/m, 1]
end
end
def source
args = buildargs.push('info')
at_path do
svn(*args)[/^URL:\s+(\S+)/m, 1]
end
end
def source=(desired)
args = buildargs.push('switch')
if @resource.value(:force)
args.push('--force')
end
if @resource.value(:revision)
args.push('-r', @resource.value(:revision))
end
if @resource.value(:conflict)
args.push('--accept', @resource.value(:conflict))
end
args.push(desired)
at_path do
svn(*args)
end
update_owner
end
def revision
args = buildargs.push('info')
at_path do
svn(*args)[/^Revision:\s+(\d+)/m, 1]
end
end
def revision=(desired)
args = if @resource.value(:source)
buildargs.push('switch', '-r', desired, @resource.value(:source))
else
buildargs.push('update', '-r', desired)
end
if @resource.value(:force)
args.push('--force')
end
if @resource.value(:conflict)
args.push('--accept', @resource.value(:conflict))
end
at_path do
svn(*args)
end
update_owner
end
def includes
return nil if Gem::Version.new(get_svn_client_version) < Gem::Version.new('1.6.0')
get_includes('.')
end
def includes=(desired)
validate_version
exists = includes
old_paths = exists - desired
new_paths = desired - exists
# Remove paths that are no longer specified
old_paths.each { |path| delete_include(path) }
update_includes(new_paths)
end
private
def get_includes(directory)
at_path do
args = buildargs.push('info', directory)
if svn(*args)[/^Depth:\s+(\w+)/m, 1] != 'empty'
return directory[2..-1].gsub(File::SEPARATOR, '/')
end
Dir.entries(directory).map { |entry|
next if entry == '.' or entry == '..' or entry == '.svn'
entry = File.join(directory, entry)
if File.directory?(entry)
get_includes(entry)
elsif File.file?(entry)
entry[2..-1].gsub(File::SEPARATOR, '/')
end
}.flatten.compact!
end
end
def delete_include(path)
at_path do
# svn version 1.6 has an incorrect implementation of the `exclude`
# parameter to `--set-depth`; it doesn't handle files, only
# directories. I know, I rolled my eyes, too.
svn_ver = get_svn_client_version
if Gem::Version.new(svn_ver) < Gem::Version.new('1.7.0') and not File.directory?(path)
# In the non-happy case, we delete the file, and check if the only
# thing left in that directory is the .svn folder. If that's the case,
# the loop below will take care of excluding the parent directory, and
# we're back to a happy case. But, if that's not the case, we need to
# fire off a warning telling the user the path can't be excluded.
Puppet.debug "Vcsrepo[#{@resource.name}]: Need to handle #{path} removal specially"
File.delete(path)
if Dir.entries(File.dirname(path)).sort != ['.', '..', '.svn']
Puppet.warning "Unable to exclude #{path} from Vcsrepo[#{@resource.name}]; update to subversion >= 1.7"
end
else
Puppet.debug "Vcsrepo[#{@resource.name}]: Can remove #{path} directly using svn"
args = buildargs.push('update', '--set-depth', 'exclude', path)
svn(*args)
end
# Keep walking up the parent directories of this include until we find
# a non-empty folder, excluding as we go.
while ((path = path.rpartition(File::SEPARATOR)[0]) != '') do
entries = Dir.entries(path).sort
break if entries != ['.', '..'] and entries != ['.', '..', '.svn']
args = buildargs.push('update', '--set-depth', 'exclude', path)
svn(*args)
end
end
end
def checkout_repository(source, path, revision, depth)
args = buildargs.push('checkout')
if revision
args.push('-r', revision)
end
if @resource.value(:includes)
# Make root checked out at empty depth to provide sparse directories
args.push('--depth', 'empty')
elsif depth
args.push('--depth', depth)
end
args.push(source, path)
svn(*args)
end
def create_repository(path)
args = ['create']
if @resource.value(:fstype)
args.push('--fs-type', @resource.value(:fstype))
end
args << path
svnadmin(*args)
end
def update_owner
if @resource.value(:owner) or @resource.value(:group)
set_ownership
end
end
def update_includes(paths)
at_path do
args = buildargs.push('update')
args.push('--depth', 'empty')
if @resource.value(:revision)
args.push('-r', @resource.value(:revision))
end
parents = paths.map { |path| File.dirname(path) }
parents = make_include_paths(parents)
args.push(*parents)
svn(*args)
args = buildargs.push('update')
if @resource.value(:revision)
args.push('-r', @resource.value(:revision))
end
if @resource.value(:depth)
args.push('--depth', @resource.value(:depth))
end
args.push(*paths)
svn(*args)
end
end
def make_include_paths(includes)
includes.map { |inc|
prefix = nil
inc.split("/").map { |path|
prefix = [prefix, path].compact.join('/')
}
}.flatten
end
def get_svn_client_version
return Facter.value('vcsrepo_svn_ver').dup
end
def validate_version
svn_ver = get_svn_client_version
if Gem::Version.new(svn_ver) < Gem::Version.new('1.6.0')
raise "Includes option is not available for SVN versions < 1.6. Version installed: #{svn_ver}"
end
end
end
require 'pathname'
require 'puppet/parameter/boolean'
Puppet::Type.newtype(:vcsrepo) do
desc "A local version control repository"
feature :gzip_compression,
"The provider supports explicit GZip compression levels"
feature :basic_auth,
"The provider supports HTTP Basic Authentication"
feature :bare_repositories,
"The provider differentiates between bare repositories
and those with working copies",
:methods => [:bare_exists?, :working_copy_exists?]
feature :filesystem_types,
"The provider supports different filesystem types"
feature :reference_tracking,
"The provider supports tracking revision references that can change
over time (eg, some VCS tags and branch names)"
feature :ssh_identity,
"The provider supports a configurable SSH identity file"
feature :user,
"The provider can run as a different user"
feature :modules,
"The repository contains modules that can be chosen of"
feature :multiple_remotes,
"The repository tracks multiple remote repositories"
feature :configuration,
"The configuration directory to use"
feature :cvs_rsh,
"The provider understands the CVS_RSH environment variable"
feature :depth,
"The provider can do shallow clones or set scope limit"
feature :branch,
"The name of the branch"
feature :p4config,
"The provider understands Perforce Configuration"
feature :submodules,
"The repository contains submodules which can be optionally initialized"
feature :conflict,
"The provider supports automatic conflict resolution"
feature :include_paths,
"The provider supports checking out only specific paths"
ensurable do
attr_accessor :latest
def insync?(is)
@should ||= []
case should
when :present
return true unless [:absent, :purged, :held].include?(is)
when :latest
if is == :latest
return true
else
return false
end
when :bare
return is == :bare
when :mirror
return is == :mirror
when :absent
return is == :absent
end
end
newvalue :present do
if !provider.exists?
provider.create
elsif provider.class.feature?(:bare_repositories) and provider.bare_exists?
provider.convert_bare_to_working_copy
end
end
newvalue :bare, :required_features => [:bare_repositories] do
if !provider.exists?
provider.create
elsif provider.working_copy_exists?
provider.convert_working_copy_to_bare
elsif provider.mirror?
provider.set_no_mirror
end
end
newvalue :mirror, :required_features => [:bare_repositories] do
if !provider.exists?
provider.create
elsif provider.working_copy_exists?
provider.convert_working_copy_to_bare
elsif !provider.mirror?
provider.set_mirror
end
end
newvalue :absent do
provider.destroy
end
newvalue :latest, :required_features => [:reference_tracking] do
if provider.exists? && !@resource.value(:force)
if provider.class.feature?(:bare_repositories) and provider.bare_exists?
provider.convert_bare_to_working_copy
end
if provider.respond_to?(:update_references)
provider.update_references
end
if provider.respond_to?(:latest?)
reference = provider.latest || provider.revision
else
reference = resource.value(:revision) || provider.revision
end
notice "Updating to latest '#{reference}' revision"
provider.revision = reference
else
notice "Creating repository from latest"
provider.create
end
end
def retrieve
prov = @resource.provider
if prov
if prov.working_copy_exists?
(@should.include?(:latest) && prov.latest?) ? :latest : :present
elsif prov.class.feature?(:bare_repositories) and prov.bare_exists?
if prov.mirror?
:mirror
else
:bare
end
else
:absent
end
else
raise Puppet::Error, "Could not find provider"
end
end
end
newparam :path do
desc "Absolute path to repository"
isnamevar
validate do |value|
path = Pathname.new(value)
unless path.absolute?
raise ArgumentError, "Path must be absolute: #{path}"
end
end
end
newproperty :source do
desc "The source URI for the repository"
# Tolerate versions/providers that strip/add trailing slashes
def insync?(is)
# unwrap @should
should = @should[0]
return true if is == should
begin
if should[-1] == '/'
return true if is == should[0..-2]
elsif is[-1] == '/'
return true if is[0..-2] == should
end
rescue
end
return false
end
end
newparam :fstype, :required_features => [:filesystem_types] do
desc "Filesystem type"
end
newproperty :revision do
desc "The revision of the repository"
newvalue(/^\S+$/)
end
newparam :owner do
desc "The user/uid that owns the repository files"
end
newparam :group do
desc "The group/gid that owns the repository files"
end
newparam :user do
desc "The user to run for repository operations"
end
newparam :excludes do
desc "Local paths which shouldn't be tracked by the repository"
end
newproperty :includes, :required_features => [:include_paths], :array_matching => :all do
desc "Paths to be included from the repository"
def insync?(is)
if is.is_a?(Array) and @should.is_a?(Array)
is.sort == @should.sort
else
is == @should
end
end
validate do |path|
if path[0..0] == '/'
raise Puppet::Error, "Include path '#{path}' starts with a '/'; remove it"
else
super(path)
end
end
end
newparam(:force, :boolean => true, :parent => Puppet::Parameter::Boolean) do
desc "Force repository creation, destroying any files on the path in the process."
defaultto false
end
newparam :compression, :required_features => [:gzip_compression] do
desc "Compression level"
validate do |amount|
unless Integer(amount).between?(0, 6)
raise ArgumentError, "Unsupported compression level: #{amount} (expected 0-6)"
end
end
end
newparam :basic_auth_username, :required_features => [:basic_auth] do
desc "HTTP Basic Auth username"
end
newparam :basic_auth_password, :required_features => [:basic_auth] do
desc "HTTP Basic Auth password"
end
newparam :identity, :required_features => [:ssh_identity] do
desc "SSH identity file"
end
newproperty :module, :required_features => [:modules] do
desc "The repository module to manage"
end
newparam :remote, :required_features => [:multiple_remotes] do
desc "The remote repository to track"
defaultto "origin"
end
newparam :configuration, :required_features => [:configuration] do
desc "The configuration directory to use"
end
newparam :cvs_rsh, :required_features => [:cvs_rsh] do
desc "The value to be used for the CVS_RSH environment variable."
end
newparam :depth, :required_features => [:depth] do
desc "The value to be used to do a shallow clone."
end
newparam :branch, :required_features => [:branch] do
desc "The name of the branch to clone."
end
newparam :p4config, :required_features => [:p4config] do
desc "The Perforce P4CONFIG environment."
end
newparam :submodules, :required_features => [:submodules] do
desc "Initialize and update each submodule in the repository."
newvalues(:true, :false)
defaultto true
end
newparam :conflict do
desc "The action to take if conflicts exist between repository and working copy"
end
newparam :trust_server_cert do
desc "Trust server certificate"
newvalues(:true, :false)
defaultto :false
end
autorequire(:package) do
['git', 'git-core', 'mercurial', 'subversion']
end
end
---
# This is the project-specific configuration file for setting up
# fast_gettext for your project.
gettext:
# This is used for the name of the .pot and .po files; they will be
# called <project_name>.pot.
project_name: puppetlabs-vcsrepo
# This is used in comments in the .pot and .po files to indicate what
# project the files belong to and should be a little more descriptive than
# <project_name>
package_name: puppetlabs-vcsrepo
# The locale that the default messages in the .pot file are in
default_locale: en
# The email used for sending bug reports.
bugs_address: docs@puppet.com
# The holder of the copyright.
copyright_holder: Puppet, Inc.
# This determines which comments in code should be eligible for translation.
# Any comments that start with this string will be externalized. (Leave
# empty to include all.)
comments_tag: TRANSLATOR
# Patterns for +Dir.glob+ used to find all files that might contain
# translatable content, relative to the project root directory
source_files:
{
"name": "puppetlabs-vcsrepo",
"version": "2.0.0",
"author": "Puppet Labs",
"summary": "Puppet module providing a type to manage repositories from various version control systems",
"license": "GPL-2.0+",
"source": "https://github.com/puppetlabs/puppetlabs-vcsrepo",
"project_page": "https://github.com/puppetlabs/puppetlabs-vcsrepo",
"issues_url": "https://tickets.puppetlabs.com/browse/MODULES",
"dependencies": [
],
"data_provider": null,
"operatingsystem_support": [
{
"operatingsystem": "RedHat",
"operatingsystemrelease": [
"5",
"6",
"7"
]
},
{
"operatingsystem": "CentOS",
"operatingsystemrelease": [
"5",
"6",
"7"
]
},
{
"operatingsystem": "OracleLinux",
"operatingsystemrelease": [
"5",
"6",
"7"
]
},
{
"operatingsystem": "Scientific",
"operatingsystemrelease": [
"5",
"6",
"7"
]
},
{
"operatingsystem": "SLES",
"operatingsystemrelease": [
"10 SP4",
"11 SP1",
"12"
]
},
{
"operatingsystem": "Debian",
"operatingsystemrelease": [
"6",
"7",
"8"
]
},
{
"operatingsystem": "Ubuntu",
"operatingsystemrelease": [
"10.04",
"12.04",
"14.04",
"16.04"
]
}
],
"requirements": [
{
"name": "puppet",
"version_requirement": ">= 4.7.0 < 6.0.0"
}
]
}
test_name 'C3492 - checkout with basic auth (http protocol)'
skip_test 'HTTP not supported yet for basic auth using git. See FM-1331'
# Globals
repo_name = 'testrepo_checkout'
user = 'foo'
password = 'bar'
http_server_script = 'basic_auth_http_daemon.rb'
hosts.each do |host|
ruby = (host.is_pe? && '/opt/puppet/bin/ruby') || 'ruby'
gem = (host.is_pe? && '/opt/puppet/bin/gem') || 'gem'
tmpdir = host.tmpdir('vcsrepo')
step 'setup - create repo' do
git_pkg = 'git'
if host['platform'] =~ /ubuntu-10/
git_pkg = 'git-core'
end
install_package(host, git_pkg)
my_root = File.expand_path(File.join(File.dirname(__FILE__), '../../../..'))
scp_to(host, "#{my_root}/acceptance/files/create_git_repo.sh", tmpdir)
on(host, "cd #{tmpdir} && ./create_git_repo.sh")
end
step 'setup - start http server' do
script =<<-EOF
require 'sinatra'
set :bind, '0.0.0.0'
set :static, true
set :public_folder, '#{tmpdir}'
use Rack::Auth::Basic do |username, password|
username == '#{user}' && password == '#{password}'
end
EOF
create_remote_file(host, "#{tmpdir}/#{http_server_script}", script)
on(host, "#{gem} install sinatra")
on(host, "#{ruby} #{tmpdir}/#{http_server_script} &")
end
teardown do
on(host, "rm -fr #{tmpdir}")
on(host, "ps ax | grep '#{ruby} #{tmpdir}/#{http_server_script}' | grep -v grep | awk '{print \"kill -9 \" $1}' | sh ; sleep 1")
end
step 'checkout with puppet using basic auth' do
pp = <<-EOS
vcsrepo { "#{tmpdir}/#{repo_name}":
ensure => present,
source => "http://#{host}:4567/testrepo.git",
provider => git,
basic_auth_username => '#{user}',
basic_auth_password => '#{password}',
}
EOS
apply_manifest_on(host, pp, :catch_failures => true)
apply_manifest_on(host, pp, :catch_changes => true)
end
step "verify checkout" do
on(host, "ls #{tmpdir}/#{repo_name}/.git/") do |res|
fail_test('checkout not found') unless res.stdout.include? "HEAD"
end
end
end
test_name 'C3493 - checkout with basic auth (https protocol)'
skip_test 'waiting for CA trust solution'
# Globals
repo_name = 'testrepo_checkout'
user = 'foo'
password = 'bar'
http_server_script = 'basic_auth_https_daemon.rb'
hosts.each do |host|
ruby = (host.is_pe? && '/opt/puppet/bin/ruby') || 'ruby'
tmpdir = host.tmpdir('vcsrepo')
step 'setup - create repo' do
git_pkg = 'git'
if host['platform'] =~ /ubuntu-10/
git_pkg = 'git-core'
end
install_package(host, git_pkg)
my_root = File.expand_path(File.join(File.dirname(__FILE__), '../../../..'))
scp_to(host, "#{my_root}/acceptance/files/create_git_repo.sh", tmpdir)
on(host, "cd #{tmpdir} && ./create_git_repo.sh")
end
step 'setup - start https server' do
script =<<-EOF
require 'webrick'
require 'webrick/https'
authenticate = Proc.new do |req, res|
WEBrick::HTTPAuth.basic_auth(req, res, '') do |user, password|
user == '#{user}' && password == '#{password}'
end
end
server = WEBrick::HTTPServer.new(
:Port => 8443,
:DocumentRoot => "#{tmpdir}",
:DocumentRootOptions=> {:HandlerCallback => authenticate},
:SSLEnable => true,
:SSLVerifyClient => OpenSSL::SSL::VERIFY_NONE,
:SSLCertificate => OpenSSL::X509::Certificate.new( File.open("#{tmpdir}/server.crt").read),
:SSLPrivateKey => OpenSSL::PKey::RSA.new( File.open("#{tmpdir}/server.key").read),
:SSLCertName => [ [ "CN",WEBrick::Utils::getservername ] ])
WEBrick::Daemon.start
server.start
EOF
create_remote_file(host, "#{tmpdir}/#{http_server_script}", script)
on(host, "#{ruby} #{tmpdir}/#{http_server_script}")
end
teardown do
on(host, "rm -fr #{tmpdir}")
on(host, "ps ax | grep '#{ruby} #{tmpdir}/#{http_server_script}' | grep -v grep | awk '{print \"kill -9 \" $1}' | sh ; sleep 1")
end
step 'checkout with puppet using basic auth' do
pp = <<-EOS
vcsrepo { "#{tmpdir}/#{repo_name}":
ensure => present,
source => "http://#{host}:8443/testrepo.git",
provider => git,
basic_auth_username => '#{user}',
basic_auth_password => '#{password}',
}
EOS
apply_manifest_on(host, pp, :catch_failures => true)
apply_manifest_on(host, pp, :catch_changes => true)
end
step "verify checkout" do
on(host, "ls #{tmpdir}/#{repo_name}/.git/") do |res|
fail_test('checkout not found') unless res.stdout.include? "HEAD"
end
end
end
test_name 'C3494 - checkout with basic auth (git protocol)'
# Globals
repo_name = 'testrepo_checkout'
user = 'foo'
password = 'bar'
http_server_script = 'basic_auth_http_daemon.rb'
hosts.each do |host|
tmpdir = host.tmpdir('vcsrepo')
step 'setup - create repo' do
git_pkg = 'git'
if host['platform'] =~ /ubuntu-10/
git_pkg = 'git-core'
end
install_package(host, git_pkg)
my_root = File.expand_path(File.join(File.dirname(__FILE__), '../../../../..'))
scp_to(host, "#{my_root}/acceptance/files/create_git_repo.sh", tmpdir)
on(host, "cd #{tmpdir} && ./create_git_repo.sh")
end
step 'setup - start git daemon' do
install_package(host, 'git-daemon') unless host['platform'] =~ /debian|ubuntu/
on(host, "git daemon --base-path=#{tmpdir} --export-all --reuseaddr --verbose --detach")
end
teardown do
on(host, "rm -fr #{tmpdir}")
on(host, 'pkill -9 git-daemon ; sleep 1')
end
step 'checkout with puppet using basic auth' do
pp = <<-EOS
vcsrepo { "#{tmpdir}/#{repo_name}":
ensure => present,
source => "git://#{host}/testrepo.git",
provider => git,
basic_auth_username => '#{user}',
basic_auth_password => '#{password}',
}
EOS
apply_manifest_on(host, pp, :catch_failures => true)
apply_manifest_on(host, pp, :catch_changes => true)
end
step "verify checkout (silent error for basic auth using git protocol)" do
on(host, "ls #{tmpdir}/#{repo_name}/.git/") do |res|
fail_test('checkout not found') unless res.stdout.include? "HEAD"
end
end
end
test_name 'C3438 - checkout a branch (file protocol)'
# Globals
repo_name = 'testrepo_branch_checkout'
branch = 'a_branch'
hosts.each do |host|
tmpdir = host.tmpdir('vcsrepo')
step 'setup - create repo' do
git_pkg = 'git'
if host['platform'] =~ /ubuntu-10/
git_pkg = 'git-core'
end
install_package(host, git_pkg)
my_root = File.expand_path(File.join(File.dirname(__FILE__), '../../../..'))
scp_to(host, "#{my_root}/acceptance/files/create_git_repo.sh", tmpdir)
on(host, "cd #{tmpdir} && ./create_git_repo.sh")
end
teardown do
on(host, "rm -fr #{tmpdir}")
end
step 'checkout a branch with puppet' do
pp = <<-EOS
vcsrepo { "#{tmpdir}/#{repo_name}":
ensure => present,
source => "file://#{tmpdir}/testrepo.git",
provider => git,
revision => '#{branch}',
}
EOS
apply_manifest_on(host, pp, :catch_failures => true)
apply_manifest_on(host, pp, :catch_changes => true)
end
step "verify checkout is on the #{branch} branch" do
on(host, "ls #{tmpdir}/#{repo_name}/.git/") do |res|
fail_test('checkout not found') unless res.stdout.include? "HEAD"
end
on(host, "cat #{tmpdir}/#{repo_name}/.git/HEAD") do |res|
fail_test('branch not found') unless res.stdout.include? "ref: refs/heads/#{branch}"
end
end
end
test_name 'C3437 - checkout a branch (file path)'
# Globals
repo_name = 'testrepo_branch_checkout'
branch = 'a_branch'
hosts.each do |host|
tmpdir = host.tmpdir('vcsrepo')
step 'setup - create repo' do
git_pkg = 'git'
if host['platform'] =~ /ubuntu-10/
git_pkg = 'git-core'
end
install_package(host, git_pkg)
my_root = File.expand_path(File.join(File.dirname(__FILE__), '../../../..'))
scp_to(host, "#{my_root}/acceptance/files/create_git_repo.sh", tmpdir)
on(host, "cd #{tmpdir} && ./create_git_repo.sh")
end
teardown do
on(host, "rm -fr #{tmpdir}")
end
step 'checkout a branch with puppet' do
pp = <<-EOS
vcsrepo { "#{tmpdir}/#{repo_name}":
ensure => present,
source => "#{tmpdir}/testrepo.git",
provider => git,
revision => '#{branch}',
}
EOS
apply_manifest_on(host, pp, :catch_failures => true)
apply_manifest_on(host, pp, :catch_changes => true)
end
step "verify checkout is on the #{branch} branch" do
on(host, "ls #{tmpdir}/#{repo_name}/.git/") do |res|
fail_test('checkout not found') unless res.stdout.include? "HEAD"
end
on(host, "cat #{tmpdir}/#{repo_name}/.git/HEAD") do |res|
fail_test('branch not found') unless res.stdout.include? "ref: refs/heads/#{branch}"
end
end
end
test_name 'C3436 - checkout a branch (git protocol)'
# Globals
repo_name = 'testrepo_branch_checkout'
branch = 'a_branch'
hosts.each do |host|
tmpdir = host.tmpdir('vcsrepo')
step 'setup - create repo' do
git_pkg = 'git'
if host['platform'] =~ /ubuntu-10/
git_pkg = 'git-core'
end
install_package(host, git_pkg)
my_root = File.expand_path(File.join(File.dirname(__FILE__), '../../../..'))
scp_to(host, "#{my_root}/acceptance/files/create_git_repo.sh", tmpdir)
on(host, "cd #{tmpdir} && ./create_git_repo.sh")
end
step 'setup - start git daemon' do
install_package(host, 'git-daemon') unless host['platform'] =~ /debian|ubuntu/
on(host, "git daemon --base-path=#{tmpdir} --export-all --reuseaddr --verbose --detach")
end
teardown do
on(host, "rm -fr #{tmpdir}")
on(host, 'pkill -9 git-daemon ; sleep 1')
end
step 'checkout a branch with puppet' do
pp = <<-EOS
vcsrepo { "#{tmpdir}/#{repo_name}":
ensure => present,
source => "git://#{host}/testrepo.git",
provider => git,
revision => '#{branch}',
}
EOS
apply_manifest_on(host, pp, :catch_failures => true)
apply_manifest_on(host, pp, :catch_changes => true)
end
step "verify checkout is on the #{branch} branch" do
on(host, "ls #{tmpdir}/#{repo_name}/.git/") do |res|
fail_test('checkout not found') unless res.stdout.include? "HEAD"
end
on(host, "cat #{tmpdir}/#{repo_name}/.git/HEAD") do |res|
fail_test('branch not found') unless res.stdout.include? "ref: refs/heads/#{branch}"
end
end
end
test_name 'C3441 - checkout a branch (http protocol)'
# Globals
repo_name = 'testrepo_branch_checkout'
branch = 'a_branch'
hosts.each do |host|
ruby = (host.is_pe? && '/opt/puppet/bin/ruby') || 'ruby'
tmpdir = host.tmpdir('vcsrepo')
step 'setup - create repo' do
git_pkg = 'git'
if host['platform'] =~ /ubuntu-10/
git_pkg = 'git-core'
end
install_package(host, git_pkg)
my_root = File.expand_path(File.join(File.dirname(__FILE__), '../../../..'))
scp_to(host, "#{my_root}/acceptance/files/create_git_repo.sh", tmpdir)
on(host, "cd #{tmpdir} && ./create_git_repo.sh")
end
step 'setup - start http server' do
http_daemon =<<-EOF
require 'webrick'
server = WEBrick::HTTPServer.new(:Port => 8000, :DocumentRoot => "#{tmpdir}")
WEBrick::Daemon.start
server.start
EOF
create_remote_file(host, '/tmp/http_daemon.rb', http_daemon)
on(host, "#{ruby} /tmp/http_daemon.rb")
end
teardown do
on(host, "rm -fr #{tmpdir}")
on(host, "ps ax | grep '#{ruby} /tmp/http_daemon.rb' | grep -v grep | awk '{print \"kill -9 \" $1}' | sh ; sleep 1")
end
step 'checkout a branch with puppet' do
pp = <<-EOS
vcsrepo { "#{tmpdir}/#{repo_name}":
ensure => present,
source => "http://#{host}:8000/testrepo.git",
provider => git,
revision => '#{branch}',
}
EOS
apply_manifest_on(host, pp, :catch_failures => true)
apply_manifest_on(host, pp, :catch_changes => true)
end
step "verify checkout is on the #{branch} branch" do
on(host, "ls #{tmpdir}/#{repo_name}/.git/") do |res|
fail_test('checkout not found') unless res.stdout.include? "HEAD"
end
on(host, "cat #{tmpdir}/#{repo_name}/.git/HEAD") do |res|
fail_test('branch not found') unless res.stdout.include? "ref: refs/heads/#{branch}"
end
end
end
test_name 'C3442 - checkout a branch (https protocol)'
# Globals
repo_name = 'testrepo_branch_checkout'
branch = 'a_branch'
hosts.each do |host|
ruby = (host.is_pe? && '/opt/puppet/bin/ruby') || 'ruby'
tmpdir = host.tmpdir('vcsrepo')
step 'setup - create repo' do
git_pkg = 'git'
if host['platform'] =~ /ubuntu-10/
git_pkg = 'git-core'
end
install_package(host, git_pkg)
my_root = File.expand_path(File.join(File.dirname(__FILE__), '../../../..'))
scp_to(host, "#{my_root}/acceptance/files/create_git_repo.sh", tmpdir)
on(host, "cd #{tmpdir} && ./create_git_repo.sh")
end
step 'setup - start https server' do
https_daemon =<<-EOF
require 'webrick'
require 'webrick/https'
server = WEBrick::HTTPServer.new(
:Port => 8443,
:DocumentRoot => "#{tmpdir}",
:SSLEnable => true,
:SSLVerifyClient => OpenSSL::SSL::VERIFY_NONE,
:SSLCertificate => OpenSSL::X509::Certificate.new( File.open("#{tmpdir}/server.crt").read),
:SSLPrivateKey => OpenSSL::PKey::RSA.new( File.open("#{tmpdir}/server.key").read),
:SSLCertName => [ [ "CN",WEBrick::Utils::getservername ] ])
WEBrick::Daemon.start
server.start
EOF
create_remote_file(host, '/tmp/https_daemon.rb', https_daemon)
#on(host, "#{ruby} /tmp/https_daemon.rb")
end
teardown do
on(host, "rm -fr #{tmpdir}")
on(host, "ps ax | grep '#{ruby} /tmp/https_daemon.rb' | grep -v grep | awk '{print \"kill -9 \" $1}' | sh ; sleep 1")
end
step 'checkout a branch with puppet' do
pp = <<-EOS
vcsrepo { "#{tmpdir}/#{repo_name}":
ensure => present,
source => "https://github.com/johnduarte/testrepo.git",
provider => git,
revision => '#{branch}',
}
EOS
apply_manifest_on(host, pp, :catch_failures => true)
apply_manifest_on(host, pp, :catch_changes => true)
end
step "verify checkout is on the #{branch} branch" do
on(host, "ls #{tmpdir}/#{repo_name}/.git/") do |res|
fail_test('checkout not found') unless res.stdout.include? "HEAD"
end
on(host, "cat #{tmpdir}/#{repo_name}/.git/HEAD") do |res|
fail_test('branch not found') unless res.stdout.include? "ref: refs/heads/#{branch}"
end
end
end