Build packages in makefile (#7759)

Daniel Nelson 2020-06-30 00:15:28 -07:00 committed by GitHub
parent 063c61f7aa
commit b6560c8b01
15 changed files with 555 additions and 1093 deletions

View File

@@ -6,10 +6,10 @@ defaults:
GOFLAGS: -p=8
go-1_13: &go-1_13
docker:
- image: 'quay.io/influxdb/telegraf-ci:1.13.11'
- image: 'quay.io/influxdb/telegraf-ci:1.13.12'
go-1_14: &go-1_14
docker:
- image: 'quay.io/influxdb/telegraf-ci:1.14.3'
- image: 'quay.io/influxdb/telegraf-ci:1.14.4'
mac: &mac
macos:
xcode: 11.3.1
@@ -109,26 +109,28 @@ jobs:
at: '/go'
- run: 'make package'
- store_artifacts:
path: './build'
destination: 'build'
path: './build/dist'
destination: 'build/dist'
release:
<<: [ *defaults, *go-1_14 ]
steps:
- attach_workspace:
at: '/go'
- run: 'make package-release'
- run: 'make package'
- store_artifacts:
path: './build'
destination: 'build'
path: './build/dist'
destination: 'build/dist'
nightly:
<<: [ *defaults, *go-1_14 ]
steps:
- attach_workspace:
at: '/go'
- run: 'make package-nightly'
- run: 'NIGHTLY=1 make package'
- run: 'make upload-nightly'
- store_artifacts:
path: './build'
destination: 'build'
path: './build/dist'
destination: 'build/dist'
workflows:
version: 2

View File

@@ -1,6 +1,6 @@
The MIT License (MIT)
Copyright (c) 2015-2019 InfluxData Inc.
Copyright (c) 2015-2020 InfluxData Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

Makefile
View File

@@ -1,34 +1,78 @@
ifeq ($(OS), Windows_NT)
VERSION := $(shell git describe --exact-match --tags 2>nul)
HOME := $(HOMEPATH)
CGO_ENABLED ?= 0
export CGO_ENABLED
devnull := nul
else
VERSION := $(shell git describe --exact-match --tags 2>/dev/null)
devnull := /dev/null
endif
PREFIX := /usr/local
BRANCH := $(shell git rev-parse --abbrev-ref HEAD)
COMMIT := $(shell git rev-parse --short HEAD)
next_version := 1.15.0
tag := $(shell git describe --exact-match --tags 2>$(devnull))
branch := $(shell git rev-parse --abbrev-ref HEAD)
commit := $(shell git rev-parse --short=8 HEAD)
ifdef NIGHTLY
version := $(next_version)
rpm_version := nightly
rpm_iteration := 0
deb_version := nightly
deb_iteration := 0
tar_version := nightly
else ifeq ($(tag),)
version := $(next_version)
rpm_version := $(version)~$(commit)-0
rpm_iteration := 0
deb_version := $(version)~$(commit)-0
deb_iteration := 0
tar_version := $(version)~$(commit)
else ifneq ($(findstring -rc,$(tag)),)
version := $(word 1,$(subst -, ,$(tag)))
version := $(version:v%=%)
rc := $(word 2,$(subst -, ,$(tag)))
rpm_version := $(version)-0.$(rc)
rpm_iteration := 0.$(subst rc,,$(rc))
deb_version := $(version)~$(rc)-1
deb_iteration := 0
tar_version := $(version)~$(rc)
else
version := $(tag:v%=%)
rpm_version := $(version)-1
rpm_iteration := 1
deb_version := $(version)-1
deb_iteration := 1
tar_version := $(version)
endif
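As an illustration only (not part of the commit): in a Linux or macOS checkout at the hypothetical tag v1.15.0-rc1, and with a GNU make new enough to have the --eval option, a throwaway rule can print the values the block above derives without building anything:
make --eval 'show-versions: ; @echo version=$(version) rpm=$(rpm_version) deb=$(deb_version) tar=$(tar_version)' show-versions
# expected output, roughly:
# version=1.15.0 rpm=1.15.0-0.rc1 deb=1.15.0~rc1-1 tar=1.15.0~rc1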
MAKEFLAGS += --no-print-directory
GOOS ?= $(shell go env GOOS)
GOARCH ?= $(shell go env GOARCH)
HOSTGO := env -u GOOS -u GOARCH -u GOARM -- go
LDFLAGS := $(LDFLAGS) -X main.commit=$(commit) -X main.branch=$(branch) -X main.version=$(version)
GOFILES ?= $(shell git ls-files '*.go')
GOFMT ?= $(shell gofmt -l -s $(filter-out plugins/parsers/influx/machine.go, $(GOFILES)))
BUILDFLAGS ?=
ifdef GOBIN
PATH := $(GOBIN):$(PATH)
else
PATH := $(subst :,/bin:,$(shell go env GOPATH))/bin:$(PATH)
endif
LDFLAGS := $(LDFLAGS) -X main.commit=$(COMMIT) -X main.branch=$(BRANCH)
ifdef VERSION
LDFLAGS += -X main.version=$(VERSION)
endif
prefix ?= /usr/local
bindir ?= $(prefix)/bin
sysconfdir ?= $(prefix)/etc
localstatedir ?= $(prefix)/var
pkgdir ?= build/dist
.PHONY: all
all:
@$(MAKE) --no-print-directory deps
@$(MAKE) --no-print-directory telegraf
@$(MAKE) deps
@$(MAKE) telegraf
.PHONY: help
help:
@echo 'Targets:'
@echo ' all - download dependencies and compile telegraf binary'
@echo ' deps - download dependencies'
@echo ' telegraf - compile telegraf binary'
@echo ' test - run short unit tests'
@echo ' fmt - format source files'
@echo ' tidy - tidy go modules'
@echo ' check-deps - check docs/LICENSE_OF_DEPENDENCIES.md'
@echo ' clean - delete build artifacts'
@echo ''
@echo 'Package Targets:'
@$(foreach dist,$(dists),echo " $(dist)";)
.PHONY: deps
deps:
@@ -38,15 +82,6 @@ deps:
telegraf:
go build -ldflags "$(LDFLAGS)" ./cmd/telegraf
.PHONY: go-install
go-install:
go install -ldflags "-w -s $(LDFLAGS)" ./cmd/telegraf
.PHONY: install
install: telegraf
mkdir -p $(DESTDIR)$(PREFIX)/bin/
cp telegraf $(DESTDIR)$(PREFIX)/bin/
.PHONY: test
test:
go test -short ./...
@@ -105,24 +140,11 @@ test-all: fmtcheck vet
check-deps:
./scripts/check-deps.sh
.PHONY: package
package:
./scripts/build.py --package --platform=all --arch=all
.PHONY: package-release
package-release:
./scripts/build.py --release --package --platform=all --arch=all \
--upload --bucket=dl.influxdata.com/telegraf/releases
.PHONY: package-nightly
package-nightly:
./scripts/build.py --nightly --package --platform=all --arch=all \
--upload --bucket=dl.influxdata.com/telegraf/nightlies
.PHONY: clean
clean:
rm -f telegraf
rm -f telegraf.exe
rm -rf build
.PHONY: docker-image
docker-image:
@@ -131,25 +153,237 @@ docker-image:
plugins/parsers/influx/machine.go: plugins/parsers/influx/machine.go.rl
ragel -Z -G2 $^ -o $@
.PHONY: static
static:
@echo "Building static linux binary..."
@CGO_ENABLED=0 \
GOOS=linux \
GOARCH=amd64 \
go build -ldflags "$(LDFLAGS)" ./cmd/telegraf
.PHONY: plugin-%
plugin-%:
@echo "Starting dev environment for $${$(@)} input plugin..."
@docker-compose -f plugins/inputs/$${$(@)}/dev/docker-compose.yml up
.PHONY: ci-1.14
ci-1.14:
docker build -t quay.io/influxdb/telegraf-ci:1.14.3 - < scripts/ci-1.14.docker
docker push quay.io/influxdb/telegraf-ci:1.14.3
.PHONY: ci-1.13
ci-1.13:
docker build -t quay.io/influxdb/telegraf-ci:1.13.8 - < scripts/ci-1.13.docker
docker push quay.io/influxdb/telegraf-ci:1.13.8
docker build -t quay.io/influxdb/telegraf-ci:1.13.11 - < scripts/ci-1.13.docker
docker push quay.io/influxdb/telegraf-ci:1.13.11
.PHONY: ci-1.12
ci-1.12:
docker build -t quay.io/influxdb/telegraf-ci:1.12.17 - < scripts/ci-1.12.docker
docker push quay.io/influxdb/telegraf-ci:1.12.17
.PHONY: install
install: $(buildbin)
@mkdir -pv $(DESTDIR)$(bindir)
@mkdir -pv $(DESTDIR)$(sysconfdir)
@mkdir -pv $(DESTDIR)$(localstatedir)
@if [ $(GOOS) != "windows" ]; then mkdir -pv $(DESTDIR)$(sysconfdir)/logrotate.d; fi
@if [ $(GOOS) != "windows" ]; then mkdir -pv $(DESTDIR)$(localstatedir)/log/telegraf; fi
@if [ $(GOOS) != "windows" ]; then mkdir -pv $(DESTDIR)$(sysconfdir)/telegraf/telegraf.d; fi
@cp -fv $(buildbin) $(DESTDIR)$(bindir)
@if [ $(GOOS) != "windows" ]; then cp -fv etc/telegraf.conf $(DESTDIR)$(sysconfdir)/telegraf/telegraf.conf$(conf_suffix); fi
@if [ $(GOOS) != "windows" ]; then cp -fv etc/logrotate.d/telegraf $(DESTDIR)$(sysconfdir)/logrotate.d; fi
@if [ $(GOOS) = "windows" ]; then cp -fv etc/telegraf_windows.conf $(DESTDIR)/telegraf.conf; fi
@if [ $(GOOS) = "linux" ]; then mkdir -pv $(DESTDIR)$(prefix)/lib/telegraf/scripts; fi
@if [ $(GOOS) = "linux" ]; then cp -fv scripts/telegraf.service $(DESTDIR)$(prefix)/lib/telegraf/scripts; fi
@if [ $(GOOS) = "linux" ]; then cp -fv scripts/init.sh $(DESTDIR)$(prefix)/lib/telegraf/scripts; fi
# Telegraf build per platform. This improves packaging performance by sharing
# the binary between the deb/rpm/tar packages for a platform, rather than
# building it separately into each package directory.
$(buildbin):
@mkdir -pv $(dir $@)
go build -o $(dir $@) -ldflags "$(LDFLAGS)" ./cmd/telegraf
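A minimal sketch of the effect (artifact names are illustrative and depend on the version block above, here again assuming the hypothetical tag v1.15.0-rc1): requesting a linux/amd64 deb and tar.gz in one invocation should compile telegraf once into build/linux-amd64/ and package it twice.
# both goals reuse build/linux-amd64/telegraf; only the fpm/tar step differs
make telegraf_1.15.0~rc1-1_amd64.deb telegraf-1.15.0~rc1_linux_amd64.tar.gz
ls build/linux-amd64/ build/dist/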
debs := telegraf_$(deb_version)_amd64.deb
debs += telegraf_$(deb_version)_arm64.deb
debs += telegraf_$(deb_version)_armel.deb
debs += telegraf_$(deb_version)_armhf.deb
debs += telegraf_$(deb_version)_i386.deb
debs += telegraf_$(deb_version)_mips.deb
debs += telegraf_$(deb_version)_mipsel.deb
debs += telegraf_$(deb_version)_s390x.deb
rpms += telegraf-$(rpm_version).aarch64.rpm
rpms += telegraf-$(rpm_version).armel.rpm
rpms += telegraf-$(rpm_version).armv6hl.rpm
rpms += telegraf-$(rpm_version).i386.rpm
rpms += telegraf-$(rpm_version).s390x.rpm
rpms += telegraf-$(rpm_version).x86_64.rpm
tars += telegraf-$(tar_version)_darwin_amd64.tar.gz
tars += telegraf-$(tar_version)_freebsd_amd64.tar.gz
tars += telegraf-$(tar_version)_freebsd_i386.tar.gz
tars += telegraf-$(tar_version)_linux_amd64.tar.gz
tars += telegraf-$(tar_version)_linux_arm64.tar.gz
tars += telegraf-$(tar_version)_linux_armel.tar.gz
tars += telegraf-$(tar_version)_linux_armhf.tar.gz
tars += telegraf-$(tar_version)_linux_i386.tar.gz
tars += telegraf-$(tar_version)_linux_mips.tar.gz
tars += telegraf-$(tar_version)_linux_mipsel.tar.gz
tars += telegraf-$(tar_version)_linux_s390x.tar.gz
tars += telegraf-$(tar_version)_static_linux_amd64.tar.gz
zips += telegraf-$(tar_version)_windows_amd64.zip
zips += telegraf-$(tar_version)_windows_i386.zip
dists := $(debs) $(rpms) $(tars) $(zips)
.PHONY: package
package: $(dists)
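Usage notes, grounded in the targets above: the help target enumerates the individual package targets, and the package target builds the full set into $(pkgdir), i.e. build/dist.
make help       # "Package Targets:" section lists every deb/rpm/tar.gz/zip
make package    # builds everything in $(dists) into build/dist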
rpm_amd64 := amd64
rpm_386 := i386
rpm_s390x := s390x
rpm_arm5 := armel
rpm_arm6 := armv6hl
rpm_arm647 := aarch64
rpm_arch := $(rpm_$(GOARCH)$(GOARM))
.PHONY: $(rpms)
$(rpms):
@$(MAKE) install
@mkdir -p $(pkgdir)
fpm --force \
--log info \
--architecture $(rpm_arch) \
--input-type dir \
--output-type rpm \
--vendor InfluxData \
--url https://github.com/influxdata/telegraf \
--license MIT \
--maintainer support@influxdb.com \
--config-files /etc/telegraf/telegraf.conf \
--config-files /etc/logrotate.d/telegraf \
--after-install scripts/rpm/post-install.sh \
--before-install scripts/rpm/pre-install.sh \
--after-remove scripts/rpm/post-remove.sh \
--description "Plugin-driven server agent for reporting metrics into InfluxDB." \
--depends coreutils \
--depends shadow-utils \
--rpm-posttrans scripts/rpm/post-install.sh \
--name telegraf \
--version $(version) \
--iteration $(rpm_iteration) \
--chdir $(DESTDIR) \
--package $(pkgdir)/$@
deb_amd64 := amd64
deb_386 := i386
deb_s390x := s390x
deb_arm5 := armel
deb_arm6 := armhf
deb_arm647 := arm64
deb_arch := $(deb_$(GOARCH)$(GOARM))
.PHONY: $(debs)
$(debs):
@$(MAKE) install
@mkdir -pv $(pkgdir)
fpm --force \
--log info \
--architecture $(deb_arch) \
--input-type dir \
--output-type deb \
--vendor InfluxData \
--url https://github.com/influxdata/telegraf \
--license MIT \
--maintainer support@influxdb.com \
--config-files /etc/telegraf/telegraf.conf.sample \
--config-files /etc/logrotate.d/telegraf \
--after-install scripts/deb/post-install.sh \
--before-install scripts/deb/pre-install.sh \
--after-remove scripts/deb/post-remove.sh \
--before-remove scripts/deb/pre-remove.sh \
--description "Plugin-driven server agent for reporting metrics into InfluxDB." \
--name telegraf \
--version $(version) \
--iteration $(deb_iteration) \
--chdir $(DESTDIR) \
--package $(pkgdir)/$@
.PHONY: $(zips)
$(zips):
@$(MAKE) install
@mkdir -p $(pkgdir)
(cd $(dir $(DESTDIR)) && zip -r - ./*) > $(pkgdir)/$@
.PHONY: $(tars)
$(tars):
@$(MAKE) install
@mkdir -p $(pkgdir)
tar --owner 0 --group 0 -czvf $(pkgdir)/$@ -C $(dir $(DESTDIR)) .
.PHONY: upload-nightly
upload-nightly:
aws s3 sync $(pkgdir) s3://dl.influxdata.com/telegraf/nightlies/ \
--exclude "*" \
--include "*.tar.gz" \
--include "*.deb" \
--include "*.rpm" \
--include "*.zip" \
--acl public-read
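For reference, the nightly CircleCI job shown earlier drives these targets roughly as follows (AWS credentials are assumed to be configured in the environment):
NIGHTLY=1 make package    # version strings collapse to "nightly"
make upload-nightly       # aws s3 sync build/dist to the nightlies bucket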
%amd64.deb %x86_64.rpm %linux_amd64.tar.gz: export GOOS := linux
%amd64.deb %x86_64.rpm %linux_amd64.tar.gz: export GOARCH := amd64
%static_linux_amd64.tar.gz: export cgo := -nocgo
%static_linux_amd64.tar.gz: export CGO_ENABLED := 0
%i386.deb %i386.rpm %linux_i386.tar.gz: export GOOS := linux
%i386.deb %i386.rpm %linux_i386.tar.gz: export GOARCH := 386
%armel.deb %armel.rpm %linux_armel.tar.gz: export GOOS := linux
%armel.deb %armel.rpm %linux_armel.tar.gz: export GOARCH := arm
%armel.deb %armel.rpm %linux_armel.tar.gz: export GOARM := 5
%armhf.deb %armv6hl.rpm %linux_armhf.tar.gz: export GOOS := linux
%armhf.deb %armv6hl.rpm %linux_armhf.tar.gz: export GOARCH := arm
%armhf.deb %armv6hl.rpm %linux_armhf.tar.gz: export GOARM := 6
%arm64.deb %aarch64.rpm %linux_arm64.tar.gz: export GOOS := linux
%arm64.deb %aarch64.rpm %linux_arm64.tar.gz: export GOARCH := arm64
%arm64.deb %aarch64.rpm %linux_arm64.tar.gz: export GOARM := 7
%mips.deb %linux_mips.tar.gz: export GOOS := linux
%mips.deb %linux_mips.tar.gz: export GOARCH := mips
%mipsel.deb %linux_mipsel.tar.gz: export GOOS := linux
%mipsel.deb %linux_mipsel.tar.gz: export GOARCH := mipsle
%s390x.deb %s390x.rpm %linux_s390x.tar.gz: export GOOS := linux
%s390x.deb %s390x.rpm %linux_s390x.tar.gz: export GOARCH := s390x
%freebsd_amd64.tar.gz: export GOOS := freebsd
%freebsd_amd64.tar.gz: export GOARCH := amd64
%freebsd_i386.tar.gz: export GOOS := freebsd
%freebsd_i386.tar.gz: export GOARCH := 386
%windows_amd64.zip: export GOOS := windows
%windows_amd64.zip: export GOARCH := amd64
%windows_i386.zip: export GOOS := windows
%windows_i386.zip: export GOARCH := 386
%windows_i386.zip %windows_amd64.zip: export prefix =
%windows_i386.zip %windows_amd64.zip: export bindir = $(prefix)
%windows_i386.zip %windows_amd64.zip: export sysconfdir = $(prefix)
%windows_i386.zip %windows_amd64.zip: export localstatedir = $(prefix)
%windows_i386.zip %windows_amd64.zip: export EXEEXT := .exe
%.deb: export pkg := deb
%.deb: export prefix := /usr
%.deb: export conf_suffix := .sample
%.deb: export sysconfdir := /etc
%.deb: export localstatedir := /var
%.rpm: export pkg := rpm
%.rpm: export prefix := /usr
%.rpm: export sysconfdir := /etc
%.rpm: export localstatedir := /var
%.tar.gz: export pkg := tar
%.tar.gz: export prefix := /usr
%.tar.gz: export sysconfdir := /etc
%.tar.gz: export localstatedir := /var
%.zip: export pkg := zip
%.zip: export prefix := /
%.deb %.rpm %.tar.gz %.zip: export DESTDIR = build/$(GOOS)-$(GOARCH)$(GOARM)$(cgo)-$(pkg)/telegraf-$(version)
%.deb %.rpm %.tar.gz %.zip: export buildbin = build/$(GOOS)-$(GOARCH)$(GOARM)$(cgo)/telegraf$(EXEEXT)
%.deb %.rpm %.tar.gz %.zip: export LDFLAGS = -w -s
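To make the target-specific exports concrete, here is an illustrative trace (not output from the commit) for a single goal, assuming a release checkout at tag v1.15.0:
make telegraf_1.15.0-1_armhf.deb
# for this goal the pattern rules export:
#   GOOS=linux GOARCH=arm GOARM=6  pkg=deb  prefix=/usr  sysconfdir=/etc  localstatedir=/var
#   buildbin = build/linux-arm6/telegraf
#   DESTDIR  = build/linux-arm6-deb/telegraf-1.15.0
# so the recipe builds the binary once, runs "make install" into DESTDIR, then fpm packs it.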

View File

@@ -1,959 +0,0 @@
#!/usr/bin/python -u
import sys
import os
import subprocess
from datetime import datetime
import shutil
import tempfile
import hashlib
import re
import logging
import argparse
################
#### Telegraf Variables
################
# Packaging variables
PACKAGE_NAME = "telegraf"
USER = "telegraf"
GROUP = "telegraf"
INSTALL_ROOT_DIR = "/usr/bin"
LOG_DIR = "/var/log/telegraf"
SCRIPT_DIR = "/usr/lib/telegraf/scripts"
CONFIG_DIR = "/etc/telegraf"
CONFIG_DIR_D = "/etc/telegraf/telegraf.d"
LOGROTATE_DIR = "/etc/logrotate.d"
INIT_SCRIPT = "scripts/init.sh"
SYSTEMD_SCRIPT = "scripts/telegraf.service"
LOGROTATE_SCRIPT = "etc/logrotate.d/telegraf"
DEFAULT_CONFIG = "etc/telegraf.conf"
DEFAULT_WINDOWS_CONFIG = "etc/telegraf_windows.conf"
POSTINST_SCRIPT = "scripts/post-install.sh"
PREINST_SCRIPT = "scripts/pre-install.sh"
POSTREMOVE_SCRIPT = "scripts/post-remove.sh"
PREREMOVE_SCRIPT = "scripts/pre-remove.sh"
# Default AWS S3 bucket for uploads
DEFAULT_BUCKET = "dl.influxdata.com/telegraf/artifacts"
CONFIGURATION_FILES = [
CONFIG_DIR + '/telegraf.conf',
LOGROTATE_DIR + '/telegraf',
]
# META-PACKAGE VARIABLES
PACKAGE_LICENSE = "MIT"
PACKAGE_URL = "https://github.com/influxdata/telegraf"
MAINTAINER = "support@influxdb.com"
VENDOR = "InfluxData"
DESCRIPTION = "Plugin-driven server agent for reporting metrics into InfluxDB."
# SCRIPT START
prereqs = ['git', 'go']
go_vet_command = "go tool vet -composites=true ./"
optional_prereqs = ['gvm', 'fpm', 'rpmbuild']
fpm_common_args = "-f -s dir --log error \
--vendor {} \
--url {} \
--license {} \
--maintainer {} \
--config-files {} \
--config-files {} \
--after-install {} \
--before-install {} \
--after-remove {} \
--before-remove {} \
--rpm-attr 755,{},{}:{} \
--description \"{}\"".format(
VENDOR,
PACKAGE_URL,
PACKAGE_LICENSE,
MAINTAINER,
CONFIG_DIR + '/telegraf.conf.sample',
LOGROTATE_DIR + '/telegraf',
POSTINST_SCRIPT,
PREINST_SCRIPT,
POSTREMOVE_SCRIPT,
PREREMOVE_SCRIPT,
USER, GROUP, LOG_DIR,
DESCRIPTION)
targets = {
'telegraf': './cmd/telegraf',
}
supported_builds = {
'darwin': ["amd64"],
"windows": ["amd64", "i386"],
"linux": ["amd64", "i386", "armhf", "armel", "arm64", "static_amd64", "s390x", "mipsel", "mips"],
"freebsd": ["amd64", "i386"]
}
supported_packages = {
"darwin": ["tar"],
"linux": ["deb", "rpm", "tar"],
"windows": ["zip"],
"freebsd": ["tar"]
}
next_version = '1.15.0'
################
#### Telegraf Functions
################
def print_banner():
logging.info("""
_____ _ __
/__ \\___| | ___ __ _ _ __ __ _ / _|
/ /\\/ _ \\ |/ _ \\/ _` | '__/ _` | |_
/ / | __/ | __/ (_| | | | (_| | _|
\\/ \\___|_|\\___|\\__, |_| \\__,_|_|
|___/
Build Script
""")
def create_package_fs(build_root):
"""Create a filesystem structure to mimic the package filesystem.
"""
logging.debug("Creating a filesystem hierarchy from directory: {}".format(build_root))
# Using [1:] for the path names due to them being absolute
# (will overwrite previous paths, per 'os.path.join' documentation)
dirs = [INSTALL_ROOT_DIR[1:], LOG_DIR[1:], SCRIPT_DIR[1:], CONFIG_DIR[1:], LOGROTATE_DIR[1:], CONFIG_DIR_D[1:]]
for d in dirs:
os.makedirs(os.path.join(build_root, d))
os.chmod(os.path.join(build_root, d), 0o755)
def package_scripts(build_root, config_only=False, windows=False):
"""Copy the necessary scripts and configuration files to the package
filesystem.
"""
if config_only or windows:
logging.info("Copying configuration to build directory")
if windows:
shutil.copyfile(DEFAULT_WINDOWS_CONFIG, os.path.join(build_root, "telegraf.conf.sample"))
else:
shutil.copyfile(DEFAULT_CONFIG, os.path.join(build_root, "telegraf.conf.sample"))
os.chmod(os.path.join(build_root, "telegraf.conf.sample"), 0o644)
else:
logging.info("Copying scripts and configuration to build directory")
shutil.copyfile(INIT_SCRIPT, os.path.join(build_root, SCRIPT_DIR[1:], INIT_SCRIPT.split('/')[1]))
os.chmod(os.path.join(build_root, SCRIPT_DIR[1:], INIT_SCRIPT.split('/')[1]), 0o644)
shutil.copyfile(SYSTEMD_SCRIPT, os.path.join(build_root, SCRIPT_DIR[1:], SYSTEMD_SCRIPT.split('/')[1]))
os.chmod(os.path.join(build_root, SCRIPT_DIR[1:], SYSTEMD_SCRIPT.split('/')[1]), 0o644)
shutil.copyfile(LOGROTATE_SCRIPT, os.path.join(build_root, LOGROTATE_DIR[1:], "telegraf"))
os.chmod(os.path.join(build_root, LOGROTATE_DIR[1:], "telegraf"), 0o644)
shutil.copyfile(DEFAULT_CONFIG, os.path.join(build_root, CONFIG_DIR[1:], "telegraf.conf.sample"))
os.chmod(os.path.join(build_root, CONFIG_DIR[1:], "telegraf.conf.sample"), 0o644)
def run_generate():
# NOOP for Telegraf
return True
def go_get(branch, update=False, no_uncommitted=False):
"""Retrieve build dependencies or restore pinned dependencies.
"""
if local_changes() and no_uncommitted:
logging.error("There are uncommitted changes in the current directory.")
return False
logging.info("Retrieving dependencies...")
run("go mod download")
return True
def run_tests(race, parallel, timeout, no_vet):
# Currently a NOOP for Telegraf
return True
################
#### All Telegraf-specific content above this line
################
def run(command, allow_failure=False, shell=False):
"""Run shell command (convenience wrapper around subprocess).
"""
out = None
logging.debug("{}".format(command))
try:
if shell:
out = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=shell)
else:
out = subprocess.check_output(command.split(), stderr=subprocess.STDOUT)
out = out.decode('utf-8').strip()
# logging.debug("Command output: {}".format(out))
except subprocess.CalledProcessError as e:
if allow_failure:
logging.warning("Command '{}' failed with error: {}".format(command, e.output))
return None
else:
logging.error("Command '{}' failed with error: {}".format(command, e.output))
sys.exit(1)
except OSError as e:
if allow_failure:
logging.warning("Command '{}' failed with error: {}".format(command, e))
return out
else:
logging.error("Command '{}' failed with error: {}".format(command, e))
sys.exit(1)
else:
return out
def create_temp_dir(prefix=None):
""" Create temporary directory with optional prefix.
"""
if prefix is None:
return tempfile.mkdtemp(prefix="{}-build.".format(PACKAGE_NAME))
else:
return tempfile.mkdtemp(prefix=prefix)
def increment_minor_version(version):
"""Return the version with the minor version incremented and patch
version set to zero.
"""
ver_list = version.split('.')
if len(ver_list) != 3:
logging.warning("Could not determine how to increment version '{}', will just use provided version.".format(version))
return version
ver_list[1] = str(int(ver_list[1]) + 1)
ver_list[2] = str(0)
inc_version = '.'.join(ver_list)
logging.debug("Incremented version from '{}' to '{}'.".format(version, inc_version))
return inc_version
def get_current_version_tag():
"""Retrieve the raw git version tag.
"""
version = run("git describe --exact-match --tags 2>/dev/null",
allow_failure=True, shell=True)
return version
def get_current_version():
"""Parse version information from git tag output.
"""
version_tag = get_current_version_tag()
if not version_tag:
return None
# Remove leading 'v'
if version_tag[0] == 'v':
version_tag = version_tag[1:]
# Replace any '-'/'_' with '~'
if '-' in version_tag:
version_tag = version_tag.replace("-", "~")
if '_' in version_tag:
version_tag = version_tag.replace("_", "~")
return version_tag
def get_current_commit(short=False):
"""Retrieve the current git commit.
"""
if short:
command = "git log --pretty=format:'%h' -n 1"
else:
command = "git rev-parse HEAD"
out = run(command)
return out.strip('\'\n\r ')
def get_current_branch():
"""Retrieve the current git branch.
"""
command = "git rev-parse --abbrev-ref HEAD"
out = run(command)
return out.strip()
def local_changes():
"""Return True if there are local un-committed changes.
"""
output = run("git diff-files --ignore-submodules --").strip()
if len(output) > 0:
return True
return False
def get_system_arch():
"""Retrieve current system architecture.
"""
arch = os.uname()[4]
if arch == "x86_64":
arch = "amd64"
elif arch == "386":
arch = "i386"
elif "arm64" in arch:
arch = "arm64"
elif 'arm' in arch:
# Prevent uname from reporting full ARM arch (eg 'armv7l')
arch = "arm"
return arch
def get_system_platform():
"""Retrieve current system platform.
"""
if sys.platform.startswith("linux"):
return "linux"
else:
return sys.platform
def get_go_version():
"""Retrieve version information for Go.
"""
out = run("go version")
matches = re.search('go version go(\S+)', out)
if matches is not None:
return matches.groups()[0].strip()
return None
def check_path_for(b):
"""Check the the user's path for the provided binary.
"""
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
full_path = os.path.join(path, b)
if is_exe(full_path):
return full_path
def check_environ(build_dir=None):
"""Check environment for common Go variables.
"""
logging.info("Checking environment...")
for v in ["GOPATH", "GOBIN", "GOROOT"]:
logging.debug("Using '{}' for {}".format(os.environ.get(v), v))
cwd = os.getcwd()
if build_dir is None and os.environ.get("GOPATH") and os.environ.get("GOPATH") not in cwd:
logging.warning("Your current directory is not under your GOPATH. This may lead to build failures.")
return True
def check_prereqs():
"""Check user path for required dependencies.
"""
logging.info("Checking for dependencies...")
for req in prereqs:
if not check_path_for(req):
logging.error("Could not find dependency: {}".format(req))
return False
return True
def upload_packages(packages, bucket_name=None, overwrite=False):
"""Upload provided package output to AWS S3.
"""
logging.debug("Uploading files to bucket '{}': {}".format(bucket_name, packages))
try:
import boto
from boto.s3.key import Key
from boto.s3.connection import OrdinaryCallingFormat
logging.getLogger("boto").setLevel(logging.WARNING)
except ImportError:
logging.warn("Cannot upload packages without 'boto' Python library!")
return False
logging.info("Connecting to AWS S3...")
# Up the number of attempts to 10 from default of 1
boto.config.add_section("Boto")
boto.config.set("Boto", "metadata_service_num_attempts", "10")
c = boto.connect_s3(calling_format=OrdinaryCallingFormat())
if bucket_name is None:
bucket_name = DEFAULT_BUCKET
bucket = c.get_bucket(bucket_name.split('/')[0])
for p in packages:
if '/' in bucket_name:
# Allow for nested paths within the bucket name (ex:
# bucket/folder). Assuming forward-slashes as path
# delimiter.
name = os.path.join('/'.join(bucket_name.split('/')[1:]),
os.path.basename(p))
else:
name = os.path.basename(p)
logging.debug("Using key: {}".format(name))
if bucket.get_key(name) is None or overwrite:
logging.info("Uploading file {}".format(name))
k = Key(bucket)
k.key = name
if overwrite:
n = k.set_contents_from_filename(p, replace=True)
else:
n = k.set_contents_from_filename(p, replace=False)
k.make_public()
else:
logging.warning("Not uploading file {}, as it already exists in the target bucket.".format(name))
return True
def go_list(vendor=False, relative=False):
"""
Return a list of packages.
If vendor is False, vendor packages are not included.
If relative is True, the package prefix defined by PACKAGE_URL is stripped.
"""
p = subprocess.Popen(["go", "list", "./..."], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
packages = out.split('\n')
if packages[-1] == '':
packages = packages[:-1]
if not vendor:
non_vendor = []
for p in packages:
if '/vendor/' not in p:
non_vendor.append(p)
packages = non_vendor
if relative:
relative_pkgs = []
for p in packages:
r = p.replace(PACKAGE_URL, '.')
if r != '.':
relative_pkgs.append(r)
packages = relative_pkgs
return packages
def build(version=None,
platform=None,
arch=None,
nightly=False,
race=False,
clean=False,
outdir=".",
tags=None,
static=False):
"""Build each target for the specified architecture and platform.
"""
if tags is None:
tags = []
logging.info("Starting build for {}/{}...".format(platform, arch))
logging.info("Using Go version: {}".format(get_go_version()))
logging.info("Using git branch: {}".format(get_current_branch()))
logging.info("Using git commit: {}".format(get_current_commit()))
if static:
logging.info("Using statically-compiled output.")
if race:
logging.info("Race is enabled.")
if len(tags) > 0:
logging.info("Using build tags: {}".format(','.join(tags)))
logging.info("Sending build output to: {}".format(outdir))
if not os.path.exists(outdir):
os.makedirs(outdir)
elif clean and outdir != '/' and outdir != ".":
logging.info("Cleaning build directory '{}' before building.".format(outdir))
shutil.rmtree(outdir)
os.makedirs(outdir)
logging.info("Using version '{}' for build.".format(version))
tmp_build_dir = create_temp_dir()
for target, path in targets.items():
logging.info("Building target: {}".format(target))
build_command = ""
# Handle static binary output
if static is True or "static_" in arch:
if "static_" in arch:
static = True
arch = arch.replace("static_", "")
build_command += "CGO_ENABLED=0 "
# Handle variations in architecture output
goarch = arch
if arch == "i386" or arch == "i686":
goarch = "386"
elif "arm64" in arch:
goarch = "arm64"
elif "arm" in arch:
goarch = "arm"
elif arch == "mipsel":
goarch = "mipsle"
build_command += "GOOS={} GOARCH={} ".format(platform, goarch)
if "arm" in arch:
if arch == "armel":
build_command += "GOARM=5 "
elif arch == "armhf" or arch == "arm":
build_command += "GOARM=6 "
elif arch == "arm64":
# TODO(rossmcdonald) - Verify this is the correct setting for arm64
build_command += "GOARM=7 "
else:
logging.error("Invalid ARM architecture specified: {}".format(arch))
logging.error("Please specify either 'armel', 'armhf', or 'arm64'.")
return False
if platform == 'windows':
target = target + '.exe'
build_command += "go build -o {} ".format(os.path.join(outdir, target))
if race:
build_command += "-race "
if len(tags) > 0:
build_command += "-tags {} ".format(','.join(tags))
ldflags = [
'-w', '-s',
'-X', 'main.branch={}'.format(get_current_branch()),
'-X', 'main.commit={}'.format(get_current_commit(short=True))]
if version:
ldflags.append('-X')
ldflags.append('main.version={}'.format(version))
build_command += ' -ldflags="{}" '.format(' '.join(ldflags))
if static:
build_command += " -a -installsuffix cgo "
build_command += path
start_time = datetime.utcnow()
run(build_command, shell=True)
end_time = datetime.utcnow()
logging.info("Time taken: {}s".format((end_time - start_time).total_seconds()))
return True
def generate_sha256_from_file(path):
"""Generate SHA256 hash signature based on the contents of the file at path.
"""
m = hashlib.sha256()
with open(path, 'rb') as f:
m.update(f.read())
return m.hexdigest()
def generate_sig_from_file(path):
"""Generate a detached GPG signature from the file at path.
"""
logging.debug("Generating GPG signature for file: {}".format(path))
gpg_path = check_path_for('gpg')
if gpg_path is None:
logging.warning("gpg binary not found on path! Skipping signature creation.")
return False
if os.environ.get("GNUPG_HOME") is not None:
run('gpg --homedir {} --armor --yes --detach-sign {}'.format(os.environ.get("GNUPG_HOME"), path))
else:
run('gpg --armor --detach-sign --yes {}'.format(path))
return True
def package(build_output, pkg_name, version, nightly=False, iteration=1, static=False, release=False):
"""Package the output of the build process.
"""
outfiles = []
tmp_build_dir = create_temp_dir()
logging.debug("Packaging for build output: {}".format(build_output))
logging.info("Using temporary directory: {}".format(tmp_build_dir))
try:
for platform in build_output:
# Create top-level folder displaying which platform (linux, etc)
os.makedirs(os.path.join(tmp_build_dir, platform))
for arch in build_output[platform]:
logging.info("Creating packages for {}/{}".format(platform, arch))
# Create second-level directory displaying the architecture (amd64, etc)
current_location = build_output[platform][arch]
# Create directory tree to mimic file system of package
build_root = os.path.join(tmp_build_dir,
platform,
arch,
PACKAGE_NAME)
os.makedirs(build_root)
# Copy packaging scripts to build directory
if platform == "windows":
# For windows and static builds, just copy
# binaries to root of package (no other scripts or
# directories)
package_scripts(build_root, config_only=True, windows=True)
elif static or "static_" in arch:
package_scripts(build_root, config_only=True)
else:
create_package_fs(build_root)
package_scripts(build_root)
for binary in targets:
# Copy newly-built binaries to packaging directory
if platform == 'windows':
binary = binary + '.exe'
if platform == 'windows' or static or "static_" in arch:
# Where the binary should go in the package filesystem
to = os.path.join(build_root, binary)
# Where the binary currently is located
fr = os.path.join(current_location, binary)
else:
# Where the binary currently is located
fr = os.path.join(current_location, binary)
# Where the binary should go in the package filesystem
to = os.path.join(build_root, INSTALL_ROOT_DIR[1:], binary)
shutil.copy(fr, to)
for package_type in supported_packages[platform]:
if package_type == "rpm" and arch in ["mipsel", "mips"]:
continue
# Package the directory structure for each package type for the platform
logging.debug("Packaging directory '{}' as '{}'.".format(build_root, package_type))
name = pkg_name
# Reset version, iteration, and current location on each run
# since they may be modified below.
package_version = version
package_iteration = iteration
if "static_" in arch:
# Remove the "static_" from the displayed arch on the package
package_arch = arch.replace("static_", "")
elif package_type == "rpm" and arch == 'armhf':
package_arch = 'armv6hl'
elif package_type == "rpm" and arch == 'arm64':
# Adjust arm64 rpm package arch
package_arch = 'aarch64'
else:
package_arch = arch
if not version:
package_version = "{}~{}".format(next_version, get_current_commit(short=True))
package_iteration = "0"
package_build_root = build_root
current_location = build_output[platform][arch]
if package_type in ['zip', 'tar']:
# For tars and zips, start the packaging one folder above
# the build root (to include the package name)
package_build_root = os.path.join('/', '/'.join(build_root.split('/')[:-1]))
if nightly:
if static or "static_" in arch:
name = '{}-static-nightly_{}_{}'.format(name,
platform,
package_arch)
else:
name = '{}-nightly_{}_{}'.format(name,
platform,
package_arch)
else:
if static or "static_" in arch:
name = '{}-{}-static_{}_{}'.format(name,
package_version,
platform,
package_arch)
else:
name = '{}-{}_{}_{}'.format(name,
package_version,
platform,
package_arch)
current_location = os.path.join(os.getcwd(), current_location)
if package_type == 'tar':
tar_command = "cd {} && tar -cvzf {}.tar.gz ./*".format(package_build_root, name)
run(tar_command, shell=True)
run("mv {}.tar.gz {}".format(os.path.join(package_build_root, name), current_location), shell=True)
outfile = os.path.join(current_location, name + ".tar.gz")
outfiles.append(outfile)
elif package_type == 'zip':
zip_command = "cd {} && zip -r {}.zip ./*".format(package_build_root, name)
run(zip_command, shell=True)
run("mv {}.zip {}".format(os.path.join(package_build_root, name), current_location), shell=True)
outfile = os.path.join(current_location, name + ".zip")
outfiles.append(outfile)
elif package_type not in ['zip', 'tar'] and static or "static_" in arch:
logging.info("Skipping package type '{}' for static builds.".format(package_type))
else:
if package_type == 'rpm' and release and '~' in package_version:
package_version, suffix = package_version.split('~', 1)
# The ~ indicates that this is a prerelease so we give it a leading 0.
package_iteration = "0.%s" % suffix
fpm_command = "fpm {} --name {} -a {} -t {} --version {} --iteration {} -C {} -p {} ".format(
fpm_common_args,
name,
package_arch,
package_type,
package_version,
package_iteration,
package_build_root,
current_location)
if package_type == "rpm":
fpm_command += "--directories /var/log/telegraf --directories /etc/telegraf --depends coreutils --depends shadow-utils --rpm-posttrans {}".format(POSTINST_SCRIPT)
out = run(fpm_command, shell=True)
matches = re.search(':path=>"(.*)"', out)
outfile = None
if matches is not None:
outfile = matches.groups()[0]
if outfile is None:
logging.warning("Could not determine output from packaging output!")
else:
if nightly:
# Strip nightly version from package name
new_outfile = outfile.replace("{}-{}".format(package_version, package_iteration), "nightly")
os.rename(outfile, new_outfile)
outfile = new_outfile
else:
if package_type == 'rpm':
# rpm's convert any dashes to underscores
package_version = package_version.replace("-", "_")
outfiles.append(os.path.join(os.getcwd(), outfile))
logging.debug("Produced package files: {}".format(outfiles))
return outfiles
finally:
# Cleanup
shutil.rmtree(tmp_build_dir)
def main(args):
global PACKAGE_NAME
if args.release and args.nightly:
logging.error("Cannot be both a nightly and a release.")
return 1
if args.nightly:
args.iteration = 0
# Pre-build checks
check_environ()
if not check_prereqs():
return 1
if args.build_tags is None:
args.build_tags = []
else:
args.build_tags = args.build_tags.split(',')
orig_commit = get_current_commit(short=True)
orig_branch = get_current_branch()
if args.platform not in supported_builds and args.platform != 'all':
logging.error("Invalid build platform: {}".format(args.platform))
return 1
build_output = {}
if args.branch != orig_branch and args.commit != orig_commit:
logging.error("Can only specify one branch or commit to build from.")
return 1
elif args.branch != orig_branch:
logging.info("Moving to git branch: {}".format(args.branch))
run("git checkout {}".format(args.branch))
elif args.commit != orig_commit:
logging.info("Moving to git commit: {}".format(args.commit))
run("git checkout {}".format(args.commit))
if not args.no_get:
if not go_get(args.branch, update=args.update, no_uncommitted=args.no_uncommitted):
return 1
if args.generate:
if not run_generate():
return 1
if args.test:
if not run_tests(args.race, args.parallel, args.timeout, args.no_vet):
return 1
platforms = []
single_build = True
if args.platform == 'all':
platforms = supported_builds.keys()
single_build = False
else:
platforms = [args.platform]
for platform in platforms:
build_output.update({platform: {}})
archs = []
if args.arch == "all":
single_build = False
archs = supported_builds.get(platform)
else:
archs = [args.arch]
for arch in archs:
od = args.outdir
if not single_build:
od = os.path.join(args.outdir, platform, arch)
if not build(version=args.version,
platform=platform,
arch=arch,
nightly=args.nightly,
race=args.race,
clean=args.clean,
outdir=od,
tags=args.build_tags,
static=args.static):
return 1
build_output.get(platform).update({arch: od})
# Build packages
if args.package:
if not check_path_for("fpm"):
logging.error("FPM ruby gem required for packaging. Stopping.")
return 1
packages = package(build_output,
args.name,
args.version,
nightly=args.nightly,
iteration=args.iteration,
static=args.static,
release=args.release)
if args.sign:
logging.debug("Generating GPG signatures for packages: {}".format(packages))
sigs = [] # retain signatures so they can be uploaded with packages
for p in packages:
if generate_sig_from_file(p):
sigs.append(p + '.asc')
else:
logging.error("Creation of signature for package [{}] failed!".format(p))
return 1
packages += sigs
if args.upload:
logging.debug("Files staged for upload: {}".format(packages))
if args.nightly:
args.upload_overwrite = True
if not upload_packages(packages, bucket_name=args.bucket, overwrite=args.upload_overwrite):
return 1
logging.info("Packages created:")
for filename in packages:
logging.info("%s (SHA256=%s)",
os.path.basename(filename),
generate_sha256_from_file(filename))
if orig_branch != get_current_branch():
logging.info("Moving back to original git branch: {}".format(args.branch))
run("git checkout {}".format(orig_branch))
return 0
if __name__ == '__main__':
LOG_LEVEL = logging.INFO
if '--debug' in sys.argv[1:]:
LOG_LEVEL = logging.DEBUG
log_format = '[%(levelname)s] %(funcName)s: %(message)s'
logging.basicConfig(level=LOG_LEVEL,
format=log_format)
parser = argparse.ArgumentParser(description='InfluxDB build and packaging script.')
parser.add_argument('--verbose', '-v', '--debug',
action='store_true',
help='Use debug output')
parser.add_argument('--outdir', '-o',
metavar='<output directory>',
default='./build/',
type=os.path.abspath,
help='Output directory')
parser.add_argument('--name', '-n',
metavar='<name>',
default=PACKAGE_NAME,
type=str,
help='Name to use for package name (when package is specified)')
parser.add_argument('--arch',
metavar='<amd64|i386|armhf|arm64|armel|all>',
type=str,
default=get_system_arch(),
help='Target architecture for build output')
parser.add_argument('--platform',
metavar='<linux|darwin|windows|all>',
type=str,
default=get_system_platform(),
help='Target platform for build output')
parser.add_argument('--branch',
metavar='<branch>',
type=str,
default=get_current_branch(),
help='Build from a specific branch')
parser.add_argument('--commit',
metavar='<commit>',
type=str,
default=get_current_commit(short=True),
help='Build from a specific commit')
parser.add_argument('--version',
metavar='<version>',
type=str,
default=get_current_version(),
help='Version information to apply to build output (ex: 0.12.0)')
parser.add_argument('--iteration',
metavar='<package iteration>',
type=str,
default="1",
help='Package iteration to apply to build output (defaults to 1)')
parser.add_argument('--stats',
action='store_true',
help='Emit build metrics (requires InfluxDB Python client)')
parser.add_argument('--stats-server',
metavar='<hostname:port>',
type=str,
help='Send build stats to InfluxDB using provided hostname and port')
parser.add_argument('--stats-db',
metavar='<database name>',
type=str,
help='Send build stats to InfluxDB using provided database name')
parser.add_argument('--nightly',
action='store_true',
help='Mark build output as nightly build (will increment the minor version)')
parser.add_argument('--update',
action='store_true',
help='Update build dependencies prior to building')
parser.add_argument('--package',
action='store_true',
help='Package binary output')
parser.add_argument('--release',
action='store_true',
help='Mark build output as release')
parser.add_argument('--clean',
action='store_true',
help='Clean output directory before building')
parser.add_argument('--no-get',
action='store_true',
help='Do not retrieve pinned dependencies when building')
parser.add_argument('--no-uncommitted',
action='store_true',
help='Fail if uncommitted changes exist in the working directory')
parser.add_argument('--upload',
action='store_true',
help='Upload output packages to AWS S3')
parser.add_argument('--upload-overwrite', '-w',
action='store_true',
help='Upload output packages to AWS S3')
parser.add_argument('--bucket',
metavar='<S3 bucket name>',
type=str,
default=DEFAULT_BUCKET,
help='Destination bucket for uploads')
parser.add_argument('--generate',
action='store_true',
help='Run "go generate" before building')
parser.add_argument('--build-tags',
metavar='<tags>',
help='Optional build tags to use for compilation')
parser.add_argument('--static',
action='store_true',
help='Create statically-compiled binary output')
parser.add_argument('--sign',
action='store_true',
help='Create GPG detached signatures for packages (when package is specified)')
parser.add_argument('--test',
action='store_true',
help='Run tests (does not produce build output)')
parser.add_argument('--no-vet',
action='store_true',
help='Do not run "go vet" when running tests')
parser.add_argument('--race',
action='store_true',
help='Enable race flag for build output')
parser.add_argument('--parallel',
metavar='<num threads>',
type=int,
help='Number of tests to run simultaneously')
parser.add_argument('--timeout',
metavar='<timeout>',
type=str,
help='Timeout for tests before failing')
args = parser.parse_args()
print_banner()
sys.exit(main(args))

View File

@@ -1,4 +1,4 @@
FROM golang:1.13.11
FROM golang:1.13.12
RUN chmod -R 755 "$GOPATH"
@@ -9,7 +9,7 @@ RUN DEBIAN_FRONTEND=noninteractive \
libtool \
locales \
make \
python-boto \
awscli \
rpm \
ruby \
ruby-dev \

View File

@@ -1,4 +1,4 @@
FROM golang:1.14.3
FROM golang:1.14.4
RUN chmod -R 755 "$GOPATH"
@@ -9,7 +9,7 @@ RUN DEBIAN_FRONTEND=noninteractive \
libtool \
locales \
make \
python-boto \
awscli \
rpm \
ruby \
ruby-dev \

View File

@@ -0,0 +1,68 @@
#!/bin/bash
BIN_DIR=/usr/bin
LOG_DIR=/var/log/telegraf
SCRIPT_DIR=/usr/lib/telegraf/scripts
LOGROTATE_DIR=/etc/logrotate.d
function install_init {
cp -f $SCRIPT_DIR/init.sh /etc/init.d/telegraf
chmod +x /etc/init.d/telegraf
}
function install_systemd {
cp -f $SCRIPT_DIR/telegraf.service $1
systemctl enable telegraf || true
systemctl daemon-reload || true
}
function install_update_rcd {
update-rc.d telegraf defaults
}
function install_chkconfig {
chkconfig --add telegraf
}
# Remove legacy symlink, if it exists
if [[ -L /etc/init.d/telegraf ]]; then
rm -f /etc/init.d/telegraf
fi
# Remove legacy symlink, if it exists
if [[ -L /etc/systemd/system/telegraf.service ]]; then
rm -f /etc/systemd/system/telegraf.service
fi
# Add defaults file, if it doesn't exist
if [[ ! -f /etc/default/telegraf ]]; then
touch /etc/default/telegraf
fi
# Add .d configuration directory
if [[ ! -d /etc/telegraf/telegraf.d ]]; then
mkdir -p /etc/telegraf/telegraf.d
fi
# If 'telegraf.conf' is not present use package's sample (fresh install)
if [[ ! -f /etc/telegraf/telegraf.conf ]] && [[ -f /etc/telegraf/telegraf.conf.sample ]]; then
cp /etc/telegraf/telegraf.conf.sample /etc/telegraf/telegraf.conf
fi
test -d $LOG_DIR || mkdir -p $LOG_DIR
chown -R -L telegraf:telegraf $LOG_DIR
chmod 755 $LOG_DIR
if [[ "$(readlink /proc/1/exe)" == */systemd ]]; then
install_systemd /lib/systemd/system/telegraf.service
deb-systemd-invoke restart telegraf.service || echo "WARNING: systemd not running."
else
# Assuming SysVinit
install_init
# Run update-rc.d or fallback to chkconfig if not available
if which update-rc.d &>/dev/null; then
install_update_rcd
else
install_chkconfig
fi
invoke-rc.d telegraf restart
fi

View File

@@ -0,0 +1,33 @@
#!/bin/bash
function disable_systemd {
systemctl disable telegraf
rm -f $1
}
function disable_update_rcd {
update-rc.d -f telegraf remove
rm -f /etc/init.d/telegraf
}
function disable_chkconfig {
chkconfig --del telegraf
rm -f /etc/init.d/telegraf
}
if [ "$1" == "remove" -o "$1" == "purge" ]; then
# Remove/purge
rm -f /etc/default/telegraf
if [[ "$(readlink /proc/1/exe)" == */systemd ]]; then
disable_systemd /lib/systemd/system/telegraf.service
else
# Assuming sysv
# Run update-rc.d or fallback to chkconfig if not available
if which update-rc.d &>/dev/null; then
disable_update_rcd
else
disable_chkconfig
fi
fi
fi

scripts/deb/pre-remove.sh
View File

@@ -0,0 +1,10 @@
#!/bin/bash
BIN_DIR=/usr/bin
if [[ "$(readlink /proc/1/exe)" == */systemd ]]; then
deb-systemd-invoke stop telegraf.service
else
# Assuming sysv
invoke-rc.d telegraf stop
fi

View File

@@ -1,14 +0,0 @@
#!/bin/bash
BIN_DIR=/usr/bin
# Distribution-specific logic
if [[ -f /etc/debian_version ]]; then
# Debian/Ubuntu logic
if [[ "$(readlink /proc/1/exe)" == */systemd ]]; then
deb-systemd-invoke stop telegraf.service
else
# Assuming sysv
invoke-rc.d telegraf stop
fi
fi

scripts/release.sh
View File

@@ -0,0 +1,111 @@
#!/bin/sh
#
# usage: release.sh BUILD_NUM
#
# Requirements:
# - curl
# - jq
# - sha256sum
# - awscli
# - gpg
#
# CIRCLE_TOKEN set to a CircleCI API token that can list the artifacts.
#
# AWS cli setup to be able to write to the BUCKET.
#
# GPG setup with a signing key.
BUILD_NUM="${1:?usage: release.sh BUILD_NUM}"
BUCKET="${2:-dl.influxdata.com/telegraf/releases}"
: ${CIRCLE_TOKEN:?"Must set CIRCLE_TOKEN"}
tmpdir="$(mktemp -d -t telegraf.XXXXXXXXXX)"
on_exit() {
rm -rf "$tmpdir"
}
trap on_exit EXIT
echo "${tmpdir}"
cd "${tmpdir}" || exit 1
curl -s -S -H Circle-Token:${CIRCLE_TOKEN} \
"https://circleci.com/api/v2/project/gh/influxdata/telegraf/${BUILD_NUM}/artifacts" \
-o artifacts || exit 1
cat artifacts | jq -r '.items[] | "\(.url) \(.path|ltrimstr("build/dist/"))"' > manifest
while read url path;
do
echo $url
curl -s -S -o "$path" "$url" &&
sha256sum "$path" > "$path.DIGESTS" &&
gpg --armor --detach-sign "$path.DIGESTS" &&
gpg --armor --detach-sign "$path" || exit 1
done < manifest
echo
cat *.DIGESTS
echo
arch() {
case ${1} in
*i386.*)
echo i386;;
*armel.*)
echo armel;;
*armv6hl.*)
echo armv6hl;;
*armhf.*)
echo armhf;;
*arm64.* | *aarch64.*)
echo arm64;;
*amd64.* | *x86_64.*)
echo amd64;;
*s390x.*)
echo s390x;;
*mipsel.*)
echo mipsel;;
*mips.*)
echo mips;;
*)
echo unknown
esac
}
platform() {
case ${1} in
*".rpm")
echo Centos;;
*".deb")
echo Debian;;
*"linux"*)
echo Linux;;
*"freebsd"*)
echo FreeBSD;;
*"darwin"*)
echo Mac OS X;;
*"windows"*)
echo Windows;;
*)
echo unknown;;
esac
}
echo "Arch | Platform | Package | SHA256"
echo "---| --- | --- | ---"
while read url path;
do
echo "$(arch ${path}) | $(platform ${path}) | [\`${path}\`](https://dl.influxdata.com/telegraf/releases/${path}) | \`$(sha256sum ${path} | cut -f1 -d' ')\`"
done < manifest
echo ""
aws s3 sync ./ "s3://$BUCKET/" \
--exclude "*" \
--include "*.tar.gz" \
--include "*.deb" \
--include "*.rpm" \
--include "*.zip" \
--include "*.asc" \
--acl public-read
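A usage sketch for the new script (the build number and token are placeholders; the prerequisites are those listed in the header comment above):
CIRCLE_TOKEN=xxxxxxxx ./scripts/release.sh 12345
# optionally override the destination bucket with a second argument:
CIRCLE_TOKEN=xxxxxxxx ./scripts/release.sh 12345 example-bucket/telegraf/releases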

View File

@@ -43,11 +43,6 @@ if [[ ! -d /etc/telegraf/telegraf.d ]]; then
mkdir -p /etc/telegraf/telegraf.d
fi
# If 'telegraf.conf' is not present use package's sample (fresh install)
if [[ ! -f /etc/telegraf/telegraf.conf ]] && [[ -f /etc/telegraf/telegraf.conf.sample ]]; then
cp /etc/telegraf/telegraf.conf.sample /etc/telegraf/telegraf.conf
fi
# Distribution-specific logic
if [[ -f /etc/redhat-release ]] || [[ -f /etc/SuSE-release ]]; then
# RHEL-variant logic
@@ -63,30 +58,6 @@ if [[ -f /etc/redhat-release ]] || [[ -f /etc/SuSE-release ]]; then
install_chkconfig
fi
fi
elif [[ -f /etc/debian_version ]]; then
# Debian/Ubuntu logic
# Ownership for RH-based platforms is set in build.py via the `rpm-attr` option.
# We perform ownership change only for Debian-based systems.
# Moving these lines out of this if statement would make `rpm -V` fail after installation.
test -d $LOG_DIR || mkdir -p $LOG_DIR
chown -R -L telegraf:telegraf $LOG_DIR
chmod 755 $LOG_DIR
if [[ "$(readlink /proc/1/exe)" == */systemd ]]; then
install_systemd /lib/systemd/system/telegraf.service
deb-systemd-invoke restart telegraf.service || echo "WARNING: systemd not running."
else
# Assuming SysVinit
install_init
# Run update-rc.d or fallback to chkconfig if not available
if which update-rc.d &>/dev/null; then
install_update_rcd
else
install_chkconfig
fi
invoke-rc.d telegraf restart
fi
elif [[ -f /etc/os-release ]]; then
source /etc/os-release
if [[ "$NAME" = "Amazon Linux" ]]; then

View File

@@ -28,24 +28,6 @@ if [[ -f /etc/redhat-release ]] || [[ -f /etc/SuSE-release ]]; then
disable_chkconfig
fi
fi
elif [[ -f /etc/debian_version ]]; then
# Debian/Ubuntu logic
if [ "$1" == "remove" -o "$1" == "purge" ]; then
# Remove/purge
rm -f /etc/default/telegraf
if [[ "$(readlink /proc/1/exe)" == */systemd ]]; then
disable_systemd /lib/systemd/system/telegraf.service
else
# Assuming sysv
# Run update-rc.d or fallback to chkconfig if not available
if which update-rc.d &>/dev/null; then
disable_update_rcd
else
disable_chkconfig
fi
fi
fi
elif [[ -f /etc/os-release ]]; then
source /etc/os-release
if [[ "$ID" = "amzn" ]] && [[ "$1" = "0" ]]; then

View File

@@ -0,0 +1,24 @@
#!/bin/bash
if ! grep "^telegraf:" /etc/group &>/dev/null; then
groupadd -r telegraf
fi
if ! id telegraf &>/dev/null; then
useradd -r -M telegraf -s /bin/false -d /etc/telegraf -g telegraf
fi
if [[ -d /etc/opt/telegraf ]]; then
# Legacy configuration found
if [[ ! -d /etc/telegraf ]]; then
# New configuration does not exist, move legacy configuration to new location
echo -e "Please note, Telegraf's configuration is now located at '/etc/telegraf' (previously '/etc/opt/telegraf')."
mv -vn /etc/opt/telegraf /etc/telegraf
if [[ -f /etc/telegraf/telegraf.conf ]]; then
backup_name="telegraf.conf.$(date +%s).backup"
echo "A backup of your current configuration can be found at: /etc/telegraf/${backup_name}"
cp -a "/etc/telegraf/telegraf.conf" "/etc/telegraf/${backup_name}"
fi
fi
fi