Merge v0.53 from upstream
commit b9bdf50628
15 changed files with 150 additions and 75 deletions
24  CHANGELOG.md
@@ -1,13 +1,35 @@
 CHANGELOG
 =========
 
+v0.53 (April 12, 2021)
+----------------------
+
+Software updates:
+
+* Upgraded Roundcube to version 1.4.11 addressing a security issue, and its desktop notifications plugin.
+* Upgraded Z-Push (for Exchange/ActiveSync) to version 2.6.2.
+
+Control panel:
+
+* Backblaze B2 is now a supported backup protocol.
+* Fixed an issue in the daily mail reports.
+* Sort the Custom DNS by zone and qname, and add an option to go back to the old sort order (creation order).
+
+Mail:
+
+* Enable sending DMARC failure reports to senders that request them.
+
+Setup:
+
+* Fixed error when upgrading from Nextcloud 13.
+
 v0.52 (January 31, 2021)
 ------------------------
 
 Software updates:
 
 * Upgraded Roundcube to version 1.4.10.
-* Upgraded zpush to 2.6.1.
+* Upgraded Z-Push to 2.6.1.
 
 Mail:
@@ -134,7 +134,7 @@ Clone this repository and checkout the tag corresponding to the most recent release
 
 	$ git clone https://github.com/mail-in-a-box/mailinabox
 	$ cd mailinabox
-	$ git checkout v0.52
+	$ git checkout v0.53
 
 Begin the installation.
 
@@ -302,17 +302,50 @@ def dns_set_secondary_nameserver():
 @app.route('/dns/custom')
 @authorized_personnel_only
 def dns_get_records(qname=None, rtype=None):
-	from dns_update import get_custom_dns_config
-	return json_response([
-		{
-			"qname": r[0],
-			"rtype": r[1],
-			"value": r[2],
-		}
-		for r in get_custom_dns_config(env)
-		if r[0] != "_secondary_nameserver"
-		and (not qname or r[0] == qname)
-		and (not rtype or r[1] == rtype) ])
+	# Get the current set of custom DNS records.
+	from dns_update import get_custom_dns_config, get_dns_zones
+	records = get_custom_dns_config(env, only_real_records=True)
+
+	# Filter per the arguments for the more complex GET routes below.
+	records = [r for r in records
+		if (not qname or r[0] == qname)
+		and (not rtype or r[1] == rtype) ]
+
+	# Make a better data structure.
+	records = [
+		{
+			"qname": r[0],
+			"rtype": r[1],
+			"value": r[2],
+			"sort-order": { },
+		} for r in records ]
+
+	# To help with grouping by zone in qname sorting, label each record with which zone it is in.
+	# There's an inconsistency in how we handle zones in get_dns_zones and in sort_domains, so
+	# do this first before sorting the domains within the zones.
+	zones = utils.sort_domains([z[0] for z in get_dns_zones(env)], env)
+	for r in records:
+		for z in zones:
+			if r["qname"] == z or r["qname"].endswith("." + z):
+				r["zone"] = z
+				break
+
+	# Add sorting information. The 'created' order follows the order in the YAML file on disk,
+	# which tracks the order entries were added in the control panel since we append to the end.
+	# The 'qname' sort order sorts by our standard domain name sort (by zone then by qname),
+	# then by rtype, and last by the original order in the YAML file (since sorting by value
+	# may not make sense, unless we parse IP addresses, for example).
+	for i, r in enumerate(records):
+		r["sort-order"]["created"] = i
+	domain_sort_order = utils.sort_domains([r["qname"] for r in records], env)
+	for i, r in enumerate(sorted(records, key = lambda r : (
+			zones.index(r["zone"]),
+			domain_sort_order.index(r["qname"]),
+			r["rtype"]))):
+		r["sort-order"]["qname"] = i
+
+	# Return.
+	return json_response(records)
 
 @app.route('/dns/custom/<qname>', methods=['GET', 'POST', 'PUT', 'DELETE'])
 @app.route('/dns/custom/<qname>/<rtype>', methods=['GET', 'POST', 'PUT', 'DELETE'])
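
For reference, a minimal sketch (not part of the commit) of how a caller of GET /dns/custom might use the new "sort-order" metadata; the sample records and the sort_records helper below are invented for illustration.

	# Hypothetical sample of what the endpoint now returns (values made up).
	records = [
		{"qname": "example.com", "rtype": "MX", "value": "10 mail.example.com.",
		 "zone": "example.com", "sort-order": {"created": 1, "qname": 0}},
		{"qname": "mail.example.com", "rtype": "A", "value": "203.0.113.1",
		 "zone": "example.com", "sort-order": {"created": 2, "qname": 1}},
		{"qname": "www.example.net", "rtype": "TXT", "value": "v=spf1 -all",
		 "zone": "example.net", "sort-order": {"created": 0, "qname": 2}},
	]

	def sort_records(records, key="qname"):
		# "qname" groups by zone and then by domain name; "created" restores the
		# on-disk (creation) order -- the two orders the control panel now offers.
		return sorted(records, key=lambda r: r["sort-order"][key])

	for r in sort_records(records, key="created"):
		print(r["qname"], r["rtype"], r["value"])
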
@@ -765,7 +765,7 @@ def write_opendkim_tables(domains, env):
 
 ########################################################################
 
-def get_custom_dns_config(env):
+def get_custom_dns_config(env, only_real_records=False):
 	try:
 		custom_dns = rtyaml.load(open(os.path.join(env['STORAGE_ROOT'], 'dns/custom.yaml')))
 		if not isinstance(custom_dns, dict): raise ValueError() # caught below

@@ -773,6 +773,8 @@ def get_custom_dns_config(env):
 		return [ ]
 
 	for qname, value in custom_dns.items():
+		if qname == "_secondary_nameserver" and only_real_records: continue # skip fake record
+
 		# Short form. Mapping a domain name to a string is short-hand
 		# for creating A records.
 		if isinstance(value, str):
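
A simplified sketch of the only_real_records behavior added above; the real get_custom_dns_config also expands long-form records, so this only shows the short form and the skipped pseudo-record, with an invented sample dict standing in for the parsed dns/custom.yaml.

	def custom_records(custom_dns, only_real_records=False):
		# custom_dns mirrors what rtyaml.load() returns: a mapping of qname -> value.
		for qname, value in custom_dns.items():
			if qname == "_secondary_nameserver" and only_real_records:
				continue  # settings entry, not a real DNS record
			if isinstance(value, str):
				# Short form: a bare string is shorthand for an A record.
				yield (qname, "A", value)

	sample = {
		"_secondary_nameserver": "ns2.example.com",
		"www.example.com": "203.0.113.10",
	}
	print(list(custom_records(sample, only_real_records=True)))
	# [('www.example.com', 'A', '203.0.113.10')]
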
@@ -44,9 +44,8 @@ TIME_DELTAS = OrderedDict([
 	('today', datetime.datetime.now() - datetime.datetime.now().replace(hour=0, minute=0, second=0))
 ])
 
-# Start date > end date!
-START_DATE = datetime.datetime.now()
-END_DATE = None
+END_DATE = NOW = datetime.datetime.now()
+START_DATE = None
 
 VERBOSE = False
 

@@ -121,7 +120,7 @@ def scan_mail_log(env):
 		pass
 
 	print("Scanning logs from {:%Y-%m-%d %H:%M:%S} to {:%Y-%m-%d %H:%M:%S}".format(
-		END_DATE, START_DATE)
+		START_DATE, END_DATE)
 	)
 
 	# Scan the lines in the log files until the date goes out of range

@@ -253,7 +252,7 @@ def scan_mail_log(env):
 
 	if collector["postgrey"]:
 		msg = "Greylisted Email {:%Y-%m-%d %H:%M:%S} and {:%Y-%m-%d %H:%M:%S}"
-		print_header(msg.format(END_DATE, START_DATE))
+		print_header(msg.format(START_DATE, END_DATE))
 
 		print(textwrap.fill(
 			"The following mail was greylisted, meaning the emails were temporarily rejected. "

@@ -291,7 +290,7 @@ def scan_mail_log(env):
 
 	if collector["rejected"]:
 		msg = "Blocked Email {:%Y-%m-%d %H:%M:%S} and {:%Y-%m-%d %H:%M:%S}"
-		print_header(msg.format(END_DATE, START_DATE))
+		print_header(msg.format(START_DATE, END_DATE))
 
 		data = OrderedDict(sorted(collector["rejected"].items(), key=email_sort))
 
@@ -344,20 +343,20 @@ def scan_mail_log_line(line, collector):
 
 	# Replaced the dateutil parser for a less clever way of parsing that is roughly 4 times faster.
 	# date = dateutil.parser.parse(date)
 
-	# date = datetime.datetime.strptime(date, '%b %d %H:%M:%S')
-	# date = date.replace(START_DATE.year)
-	# strptime fails on Feb 29 if correct year is not provided. See https://bugs.python.org/issue26460
-	date = datetime.datetime.strptime(str(START_DATE.year) + ' ' + date, '%Y %b %d %H:%M:%S')
-	# print("date:", date)
+	# strptime fails on Feb 29 with ValueError: day is out of range for month if correct year is not provided.
+	# See https://bugs.python.org/issue26460
+	date = datetime.datetime.strptime(str(NOW.year) + ' ' + date, '%Y %b %d %H:%M:%S')
+	# if log date in future, step back a year
+	if date > NOW:
+		date = date.replace(year = NOW.year - 1)
+	#print("date:", date)
 
 	# Check if the found date is within the time span we are scanning
-	# END_DATE < START_DATE
-	if date > START_DATE:
+	if date > END_DATE:
 		# Don't process, and halt
 		return False
-	elif date < END_DATE:
+	elif date < START_DATE:
 		# Don't process, but continue
 		return True
 
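
The motivation for the new parsing: syslog timestamps carry no year, strptime defaults to 1900 and therefore rejects "Feb 29", so the current year is prepended and then rolled back whenever a parsed timestamp lands in the future. A standalone sketch of that logic (parse_syslog_date is an invented helper name):

	import datetime

	NOW = datetime.datetime.now()

	def parse_syslog_date(date_str):
		# 'Feb 29 12:00:00' would raise "day is out of range for month" because
		# strptime assumes the year 1900 (https://bugs.python.org/issue26460),
		# so prepend the current year before parsing.
		date = datetime.datetime.strptime(str(NOW.year) + ' ' + date_str, '%Y %b %d %H:%M:%S')
		# A timestamp "in the future" can only come from last year's log lines
		# (e.g. December entries read in January), so step the year back.
		if date > NOW:
			date = date.replace(year=NOW.year - 1)
		return date

	print(parse_syslog_date('Dec 31 23:59:59'))
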
@@ -606,7 +605,7 @@ def email_sort(email):
 
 
 def valid_date(string):
-	""" Validate the given date string fetched from the --startdate argument """
+	""" Validate the given date string fetched from the --enddate argument """
 	try:
 		date = dateutil.parser.parse(string)
 	except ValueError:
@@ -820,12 +819,14 @@ if __name__ == "__main__":
 
 	parser.add_argument("-t", "--timespan", choices=TIME_DELTAS.keys(), default='today',
		metavar='<time span>',
-		help="Time span to scan, going back from the start date. Possible values: "
+		help="Time span to scan, going back from the end date. Possible values: "
			"{}. Defaults to 'today'.".format(", ".join(list(TIME_DELTAS.keys()))))
-	parser.add_argument("-d", "--startdate", action="store", dest="startdate",
-		type=valid_date, metavar='<start date>',
-		help="Date and time to start scanning the log file from. If no date is "
-			"provided, scanning will start from the current date and time.")
+	# keep the --startdate arg for backward compatibility
+	parser.add_argument("-d", "--enddate", "--startdate", action="store", dest="enddate",
+		type=valid_date, metavar='<end date>',
+		help="Date and time to end scanning the log file. If no date is "
+			"provided, scanning will end at the current date and time. "
+			"Alias --startdate is for compatibility.")
 	parser.add_argument("-u", "--users", action="store", dest="users",
		metavar='<email1,email2,email...>',
		help="Comma separated list of (partial) email addresses to filter the "
@@ -837,13 +838,13 @@ if __name__ == "__main__":
 
 	args = parser.parse_args()
 
-	if args.startdate is not None:
-		START_DATE = args.startdate
+	if args.enddate is not None:
+		END_DATE = args.enddate
 		if args.timespan == 'today':
 			args.timespan = 'day'
-		print("Setting start date to {}".format(START_DATE))
+		print("Setting end date to {}".format(END_DATE))
 
-	END_DATE = START_DATE - TIME_DELTAS[args.timespan]
+	START_DATE = END_DATE - TIME_DELTAS[args.timespan]
 
 	VERBOSE = args.verbose
 
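
To make the new semantics concrete: --enddate (defaulting to now) anchors the scan window and --timespan counts back from it. A rough illustration using an assumed subset of the script's TIME_DELTAS table (only 'today' and 'day' appear in the diff; the values here are illustrative):

	import datetime

	NOW = datetime.datetime.now()
	TIME_DELTAS = {
		'today': NOW - NOW.replace(hour=0, minute=0, second=0),  # time since midnight
		'day': datetime.timedelta(days=1),                       # assumed value
	}

	END_DATE = NOW                                 # or the value passed via --enddate
	timespan = 'today'                             # switched to 'day' when --enddate is given
	START_DATE = END_DATE - TIME_DELTAS[timespan]  # the window runs backward from the end
	print("Scanning logs from {:%Y-%m-%d %H:%M:%S} to {:%Y-%m-%d %H:%M:%S}".format(START_DATE, END_DATE))
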
@@ -153,8 +153,8 @@ function show_aliases() {
 		function(r) {
 			$('#alias_table tbody').html("");
 			for (var i = 0; i < r.length; i++) {
-				var hdr = $("<tr><td colspan='3'><h4/></td></tr>");
-				hdr.find('h4').text(r[i].domain);
+				var hdr = $("<tr><th colspan='4' style='background-color: #EEE'></th></tr>");
+				hdr.find('th').text(r[i].domain);
 				$('#alias_table tbody').append(hdr);
 
 				for (var k = 0; k < r[i].aliases.length; k++) {
@@ -57,7 +57,13 @@
 	</div>
 </form>
 
-<table id="custom-dns-current" class="table" style="width: auto; display: none">
+<div style="text-align: right; font-size: 90%; margin-top: 1em;">
+	sort by:
+	<a href="#" onclick="window.miab_custom_dns_data_sort_order='qname'; show_current_custom_dns_update_after_sort(); return false;">domain name</a>
+	|
+	<a href="#" onclick="window.miab_custom_dns_data_sort_order='created'; show_current_custom_dns_update_after_sort(); return false;">created</a>
+</div>
+<table id="custom-dns-current" class="table" style="width: auto; display: none; margin-top: 0;">
 <thead>
 	<th>Domain Name</th>
 	<th>Record Type</th>
@@ -192,36 +198,38 @@ function show_current_custom_dns() {
 			$('#custom-dns-current').fadeIn();
 		else
 			$('#custom-dns-current').fadeOut();
 
-		var reverse_fqdn = function(el) {
-			el.qname = el.qname.split('.').reverse().join('.');
-			return el;
-		}
-		var sort = function(a, b) {
-			if(a.qname === b.qname) {
-				if(a.rtype === b.rtype) {
-					return a.value > b.value ? 1 : -1;
-				}
-				return a.rtype > b.rtype ? 1 : -1;
-			}
-			return a.qname > b.qname ? 1 : -1;
-		}
-
-		data = data.map(reverse_fqdn).sort(sort).map(reverse_fqdn);
-
-		$('#custom-dns-current').find("tbody").text('');
-		for (var i = 0; i < data.length; i++) {
-			var tr = $("<tr/>");
-			$('#custom-dns-current').find("tbody").append(tr);
-			tr.attr('data-qname', data[i].qname);
-			tr.attr('data-rtype', data[i].rtype);
-			tr.attr('data-value', data[i].value);
-			tr.append($('<td class="long"/>').text(data[i].qname));
-			tr.append($('<td/>').text(data[i].rtype));
-			tr.append($('<td class="long"/>').text(data[i].value));
-			tr.append($('<td>[<a href="#" onclick="return delete_custom_dns_record(this)">delete</a>]</td>'));
-		}
-	});
-}
+		window.miab_custom_dns_data = data;
+		show_current_custom_dns_update_after_sort();
+	});
+}
+
+function show_current_custom_dns_update_after_sort() {
+	var data = window.miab_custom_dns_data;
+	var sort_key = window.miab_custom_dns_data_sort_order || "qname";
+	data.sort(function(a, b) { return a["sort-order"][sort_key] - b["sort-order"][sort_key] });
+
+	var tbody = $('#custom-dns-current').find("tbody");
+	tbody.text('');
+	var last_zone = null;
+	for (var i = 0; i < data.length; i++) {
+		if (sort_key == "qname" && data[i].zone != last_zone) {
+			var r = $("<tr><th colspan=4 style='background-color: #EEE'></th></tr>");
+			r.find("th").text(data[i].zone);
+			tbody.append(r);
+			last_zone = data[i].zone;
+		}
+
+		var tr = $("<tr/>");
+		tbody.append(tr);
+		tr.attr('data-qname', data[i].qname);
+		tr.attr('data-rtype', data[i].rtype);
+		tr.attr('data-value', data[i].value);
+		tr.append($('<td class="long"/>').text(data[i].qname));
+		tr.append($('<td/>').text(data[i].rtype));
+		tr.append($('<td class="long" style="max-width: 40em"/>').text(data[i].value));
+		tr.append($('<td>[<a href="#" onclick="return delete_custom_dns_record(this)">delete</a>]</td>'));
+	}
+}
 
 function delete_custom_dns_record(elem) {
@@ -18,6 +18,7 @@
 	<option value="local">{{hostname}}</option>
 	<option value="rsync">rsync</option>
 	<option value="s3">Amazon S3</option>
+	<option value="b2">Backblaze B2</option>
 </select>
 </div>
 </div>
@@ -1,7 +1,6 @@
 <h2>Users</h2>
 
 <style>
-#user_table h4 { margin: 1em 0 0 0; }
 #user_table tr.account_inactive td.address { color: #888; text-decoration: line-through; }
 #user_table .actions { margin-top: .33em; font-size: 95%; }
 #user_table .account_inactive .if_active { display: none; }

@@ -183,8 +182,8 @@ function show_users() {
 		function(r) {
 			$('#user_table tbody').html("");
 			for (var i = 0; i < r.length; i++) {
-				var hdr = $("<tr><td colspan='6'><h4/></td></tr>");
-				hdr.find('h4').text(r[i].domain);
+				var hdr = $("<tr><th colspan='6' style='background-color: #EEE'></th></tr>");
+				hdr.find('th').text(r[i].domain);
 				$('#user_table tbody').append(hdr);
 
 				for (var k = 0; k < r[i].users.length; k++) {
@@ -62,7 +62,8 @@ chmod go-rwx $STORAGE_ROOT/mail/dkim
 
 management/editconf.py /etc/opendmarc.conf -s \
 	"Syslog=true" \
-	"Socket=inet:8893@[127.0.0.1]"
+	"Socket=inet:8893@[127.0.0.1]" \
+	"FailureReports=true"
 
 # SPFIgnoreResults causes the filter to ignore any SPF results in the header
 # of the message. This is useful if you want the filter to perform SPF checks

@@ -81,6 +82,12 @@ management/editconf.py /etc/opendmarc.conf -s \
 management/editconf.py /etc/opendmarc.conf -s \
 	"SPFSelfValidate=true"
 
+# Enables generation of failure reports for sending domains that publish a
+# "none" policy.
+
+management/editconf.py /etc/opendmarc.conf -s \
+	"FailureReportsOnNone=true"
+
 # AlwaysAddARHeader Adds an "Authentication-Results:" header field even to
 # unsigned messages from domains with no "signs all" policy. The reported DKIM
 # result will be "none" in such cases. Normally unsigned mail from non-strict
@@ -27,9 +27,10 @@ done
 # provision free TLS certificates.
 apt_install duplicity python3-pip python3-gpg virtualenv certbot
 
+# b2sdk is used for backblaze backups.
 # boto is used for amazon aws backups.
 # Both are installed outside the pipenv, so they can be used by duplicity
-hide_output pip3 install --upgrade boto
+hide_output pip3 install --upgrade b2sdk boto
 
 # Create a virtualenv for the installation of Python 3 packages
 # used by the management daemon.

@@ -50,7 +51,7 @@ hide_output $venv/bin/pip install --upgrade \
 	rtyaml "email_validator>=1.0.0" "exclusiveprocess" \
 	flask dnspython python-dateutil \
 	qrcode[pil] pyotp \
-	"idna>=2.0.0" "cryptography==2.2.2" boto psutil postfix-mta-sts-resolver
+	"idna>=2.0.0" "cryptography==2.2.2" boto psutil postfix-mta-sts-resolver b2sdk
 
 # Make the venv use the packaged gpgme bindings (the ones pip provides are severely out-of-date)
 if [ ! -d $venv/lib/python$(python_version)/site-packages/gpg/ ]; then
@@ -24,8 +24,8 @@ InstallNextcloud() {
 	hash_contacts=$4
 	version_calendar=$5
 	hash_calendar=$6
-	version_user_external=$7
-	hash_user_external=$8
+	version_user_external=${7:-}
+	hash_user_external=${8:-}
 
 	echo
 	echo "Upgrading to Nextcloud version $version"
@@ -112,7 +112,7 @@ apt_get_quiet autoremove
 # * openssh-client: provides ssh-keygen
 
 echo Installing system packages...
-apt_install python3 python3-dev python3-pip \
+apt_install python3 python3-dev python3-pip python3-setuptools \
 	netcat-openbsd wget curl git sudo coreutils bc \
 	haveged pollinate openssh-client unzip \
 	unattended-upgrades cron ntp fail2ban rsyslog
@@ -28,10 +28,11 @@ apt_install \
 # Install Roundcube from source if it is not already present or if it is out of date.
 # Combine the Roundcube version number with the commit hash of plugins to track
 # whether we have the latest version of everything.
+
 VERSION=1.4.11
 HASH=3877f0e70f29e7d0612155632e48c3db1e626be3
-PERSISTENT_LOGIN_VERSION=6b3fc450cae23ccb2f393d0ef67aa319e877e435
-HTML5_NOTIFIER_VERSION=4b370e3cd60dabd2f428a26f45b677ad1b7118d5
+PERSISTENT_LOGIN_VERSION=6b3fc450cae23ccb2f393d0ef67aa319e877e435 # version 5.2.0
+HTML5_NOTIFIER_VERSION=68d9ca194212e15b3c7225eb6085dbcf02fd13d7 # version 0.6.4+
 CARDDAV_VERSION=3.0.3
 CARDDAV_HASH=d1e3b0d851ffa2c6bd42bf0c04f70d0e1d0d78f8
 
@@ -22,8 +22,8 @@ apt_install \
 phpenmod -v php imap
 
 # Copy Z-Push into place.
-VERSION=2.6.1
-TARGETHASH=a4415f0dc0ed884acc8ad5c506944fc7e6d68eeb
+VERSION=2.6.2
+TARGETHASH=4b312d64227ef887b24d9cc8f0ae17519586f6e2
 needs_update=0 #NODOC
 if [ ! -f /usr/local/lib/z-push/version ]; then
 	needs_update=1 #NODOC