Mirror of https://github.com/prometheus/prometheus.git
Simplify makefiles.
This removes the dependency on C LevelDB and Snappy. It also stops managing dependencies that would not work on any non-Debian, non-Brew system anyway.

Change-Id: Ia70dce1ba8a816a003587927e0b3a3f8ad2fd28c
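With the third-party C tool-chain gone, day-to-day builds reduce to plain Go tooling. Roughly, and assuming only `make`, `git`, and `curl` on the PATH (the Makefile fetches its own Go toolchain into `.build/`), the workflow sketched by the new targets below is:

    make build     # go get -d, then go build -o prometheus
    make test      # go test ./... with GO_TEST_FLAGS ?= "-v -short"
    make tarball   # tar -czf prometheus-$(VERSION).$(GOOS)-$(GOARCH).tar.gz prometheus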
parent 9ea808cd8b
commit 006b5517e2
.build/Makefile (122 changed lines)
@@ -15,127 +15,7 @@
 include ../Makefile.INCLUDE

-all: dependencies-stamp
+all:

-bison-stamp: bison-implementation-$(UNAME)-stamp
-	[ -x "$$(which bison)" ] || { echo "bison not found." ; false ; }
-	touch $@
-
-bison-implementation-Darwin-stamp:
-	[ -x "$$(which bison)" ] || $(BREW_INSTALL) bison
-	touch $@
-
-bison-implementation-Linux-stamp:
-	[ -x "$$(which bison)" ] || $(APT_GET_INSTALL) bison
-	touch $@
-
-cache-stamp:
-	$(MAKE) -C cache
-	touch $@
-
-cc-stamp: cc-implementation-$(UNAME)-stamp
-	[ -x "$$(which cc)" ] || { echo "cc not found." ; false ; }
-	touch $@
-
-cc-implementation-Darwin-stamp:
-	[ -x "$$(which cc)" ] || { echo "Install XCode?" ; false ; }
-	touch $@
-
-cc-implementation-Linux-stamp:
-	[ -x "$$(which cc)" ] || $(APT_GET_INSTALL) build-essential
-	touch $@
-
-dependencies-stamp: cache-stamp cc-stamp leveldb-stamp snappy-stamp godns-stamp goleveldb-stamp
-	touch $@
-
-goprotobuf-protoc-gen-go-stamp: protoc-stamp goprotobuf-stamp
-	$(GO_GET) code.google.com/p/goprotobuf/protoc-gen-go $(THIRD_PARTY_BUILD_OUTPUT)
-	touch $@
-
-goprotobuf-stamp: protoc-stamp
-	$(GO_GET) code.google.com/p/goprotobuf/proto $(THIRD_PARTY_BUILD_OUTPUT)
-	touch $@
-
-godns-stamp:
-	$(GO_GET) github.com/miekg/dns $(THIRD_PARTY_BUILD_OUTPUT)
-	touch $@
-
-goleveldb-stamp:
-	$(GO_GET) github.com/syndtr/goleveldb/leveldb $(THIRD_PARTY_BUILD_OUTPUT)
-	touch $@
-
-leveldb-stamp: cache-stamp cache/leveldb-$(LEVELDB_VERSION).tar.gz cc-stamp rsync-stamp snappy-stamp
-	tar xzvf cache/leveldb-$(LEVELDB_VERSION).tar.gz -C dirty $(THIRD_PARTY_BUILD_OUTPUT)
-	cd dirty/leveldb-$(LEVELDB_VERSION) && CFLAGS="$(CFLAGS) -lsnappy" CXXFLAGS="$(CXXFLAGS) -lsnappy $(LDFLAGS)" LDFLAGS="-lsnappy $(LDFLAGS)" bash -x ./build_detect_platform build_config.mk ./
-	# The test that LevelDB uses to test for Snappy is naive and
-	# does not respect LDFLAGS. :-(
-	CFLAGS="$(CFLAGS) -lsnappy" CXXFLAGS="$(CXXFLAGS) -lsnappy $(LDFLAGS)" LDFLAGS="-lsnappy $(LDFLAGS)" $(MAKE) -C dirty/leveldb-$(LEVELDB_VERSION) $(THIRD_PARTY_BUILD_OUTPUT)
-	rsync -av "dirty/leveldb-$(LEVELDB_VERSION)/include/" "$(PREFIX)/include/" $(THIRD_PARTY_BUILD_OUTPUT)
-	-[ "$(UNAME)" = "Linux" ] && { rsync -av "dirty/leveldb-$(LEVELDB_VERSION)/"*.*so* "$(PREFIX)/lib/" ; } $(THIRD_PARTY_BUILD_OUTPUT)
-	-[ "$(UNAME)" = "Darwin" ] && { rsync -av "dirty/leveldb-$(LEVELDB_VERSION)/"*.*dylib* "$(PREFIX)/lib/" ; } $(THIRD_PARTY_BUILD_OUTPUT)
-	rsync -av "dirty/leveldb-$(LEVELDB_VERSION)/"*.a "$(PREFIX)/lib/" $(THIRD_PARTY_BUILD_OUTPUT)
-	touch $@
-
-libunwind-stamp:
-	$(APT_GET_INSTALL) libunwind7
-	$(APT_GET_INSTALL) libunwind7-dev
-	touch $@
-
-noop-target-stamp:
-	echo "Not doing anything."
-	touch $@
-
-protoc-stamp: cache-stamp cache/protobuf-$(PROTOCOL_BUFFERS_VERSION).tar.bz2 cc-stamp
-	tar xjvf cache/protobuf-$(PROTOCOL_BUFFERS_VERSION).tar.bz2 -C dirty $(THIRD_PARTY_BUILD_OUTPUT)
-	cd dirty/protobuf-$(PROTOCOL_BUFFERS_VERSION) && ./configure --prefix="$(PREFIX)" $(THIRD_PARTY_BUILD_OUTPUT)
-	$(MAKE) -C dirty/protobuf-$(PROTOCOL_BUFFERS_VERSION) $(THIRD_PARTY_BUILD_OUTPUT)
-	$(MAKE) -C dirty/protobuf-$(PROTOCOL_BUFFERS_VERSION) install $(THIRD_PARTY_BUILD_OUTPUT)
-	[ -x "$$(which protoc)" ] || { echo "protoc not found." ; false ; }
-	touch $@
-
-rsync-implementation-Darwin-stamp:
-	[ -x "$$(which rsync)" ] || $(BREW_INSTALL) rsync
-	touch $@
-
-rsync-implementation-Linux-stamp:
-	[ -x "$$(which rsync)" ] || $(APT_GET_INSTALL) rsync
-
-rsync-stamp: rsync-implementation-$(UNAME)-stamp
-	[ -x "$$(which rsync)" ] || { echo "rsync not found." ; false ; }
-	touch $@
-
-snappy-stamp: cache-stamp cache/snappy-$(SNAPPY_VERSION).tar.gz cc-stamp
-	tar xzvf cache/snappy-$(SNAPPY_VERSION).tar.gz -C dirty $(THIRD_PARTY_BUILD_OUTPUT)
-	cd dirty/snappy-$(SNAPPY_VERSION) && ./configure --prefix="$(PREFIX)" $(THIRD_PARTY_BUILD_OUTPUT)
-	$(MAKE) -C dirty/snappy-$(SNAPPY_VERSION) $(THIRD_PARTY_BUILD_OUTPUT)
-	$(MAKE) -C dirty/snappy-$(SNAPPY_VERSION) install $(THIRD_PARTY_BUILD_OUTPUT)
-	touch $@
-
-ifeq ($(UNAME), Linux)
-stack-unwind-support-stamp: libunwind-stamp
-	touch $@
-else
-stack-unwind-support-stamp: noop-target-stamp
-	touch $@
-endif
-
-vim-implementation-Darwin-stamp:
-	[ -x "$$(which vim)" ] || $(BREW_INSTALL) vim
-	touch $@
-
-vim-implementation-Linux-stamp:
-	[ -x "$$(which vim)" ] || $(APT_GET_INSTALL) vim
-	touch $@
-
-vim-stamp: vim-implementation-$(UNAME)-stamp
-	touch $@
-
 clean:
-	$(MAKE) -C cache clean
-	$(MAKE) -C dirty clean
 	$(MAKE) -C root clean
-	$(MAKE) -C package clean
-	rm -rf *-stamp
-
-.PHONY: clean
.build/cache/Makefile (deleted, 48 lines)
@@ -1,48 +0,0 @@
# Copyright 2013 Prometheus Team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

.SUFFIXES:

include ../../Makefile.INCLUDE

all: populate

populate: leveldb-$(LEVELDB_VERSION).tar.gz protobuf-$(PROTOCOL_BUFFERS_VERSION).tar.bz2 snappy-$(SNAPPY_VERSION).tar.gz

leveldb-$(LEVELDB_VERSION).tar.gz: wget-stamp
	$(WGET) http://leveldb.googlecode.com/files/leveldb-$(LEVELDB_VERSION).tar.gz

protobuf-$(PROTOCOL_BUFFERS_VERSION).tar.bz2: wget-stamp
	$(WGET) http://protobuf.googlecode.com/files/$@

snappy-$(SNAPPY_VERSION).tar.gz: wget-stamp
	$(WGET) http://snappy.googlecode.com/files/snappy-$(SNAPPY_VERSION).tar.gz

wget-implementation-Darwin-stamp:
	[ -x "$$(which wget)" ] || $(BREW_INSTALL) wget
	touch $@

wget-implementation-Linux-stamp:
	[ -x "$$(which wget)" ] || $(APT_GET_INSTALL) wget
	touch $@

wget-stamp: wget-implementation-$(UNAME)-stamp
	[ -x "$$(which wget)" ] || { echo "wget not found." ; false ; }
	touch $@

clean:
	-[ -n "$(REALLY_CLEAN)" ] && rm -rf *.bz2
	-[ -n "$(REALLY_CLEAN)" ] && rm -rf *.gz
	rm -rf *-stamp

.PHONY: clean populate
.build/dirty/.gitignore (deleted)
@@ -1 +0,0 @@
*

@@ -1,22 +0,0 @@
# Copyright 2013 Prometheus Team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

.SUFFIXES:

include ../../Makefile.INCLUDE

all:

clean:
	rm -rf *
	git checkout .
.build/package/.gitignore (deleted)
@@ -1 +0,0 @@
*

@@ -1,22 +0,0 @@
# Copyright 2013 Prometheus Team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

.SUFFIXES:

include ../../Makefile.INCLUDE

all:

clean:
	rm -rf *
	git checkout .
.build/package/lib/.gitignore (deleted)
@@ -1 +0,0 @@
*

@@ -1,29 +0,0 @@
#!/usr/bin/env bash

# If either of the two tests below fail, you may need to install GNU coreutils
# in your environment.

if [ ! -x "$(which readlink)" ]; then
  echo "readlink tool cannot be found." > /dev/stderr
  exit 1
fi

if [ ! -x "$(which dirname)" ]; then
  echo "dirname tool cannot be found." > /dev/stderr
  exit 1
fi

readonly binary="${0}"
readonly binary_path="$(readlink -f ${binary})"
readonly binary_directory="$(dirname ${binary_path})"

readonly platform=$(uname | tr '[:upper:]' '[:lower:]')

export LD_LIBRARY_PATH="${binary_directory}/lib:${LD_LIBRARY_PATH}"

if [[ "${platform}" == "darwin" ]]; then
  export DYLD_LIBRARY_PATH="${binary_directory}/lib:${DYLD_LIBRARY_PATH}"
fi

exec "${binary_directory}/prometheus" "${@}"
Makefile (43 changed lines)
@@ -22,14 +22,12 @@ $(GOCC): $(BUILD_PATH)/cache/$(GOPKG) $(FULL_GOPATH)
 	touch $@

 advice:
-	$(GO) tool vet .
+	$(GO) vet ./...

 binary: build

-build: config dependencies model preparation tools web
+build: config dependencies tools web
 	$(GO) build -o prometheus $(BUILDFLAGS) .
-	cp prometheus $(BUILD_PATH)/package/prometheus
-	rsync -av --delete $(BUILD_PATH)/root/lib/ $(BUILD_PATH)/package/lib/

 docker: build
 	docker build -t prometheus:$(REV) .
@@ -37,7 +35,7 @@ docker: build
 tarball: $(ARCHIVE)

 $(ARCHIVE): build
-	tar -C $(BUILD_PATH)/package -czf $(ARCHIVE) .
+	tar -czf $(ARCHIVE) prometheus

 release: REMOTE ?= $(error "can't upload, REMOTE not set")
 release: REMOTE_DIR ?= $(error "can't upload, REMOTE_DIR not set")
@@ -49,7 +47,7 @@ tag:
 	git push --tags

 $(BUILD_PATH)/cache/$(GOPKG):
-	curl -o $@ -L $(GOURL)/$(GOPKG)
+	$(CURL) -o $@ -L $(GOURL)/$(GOPKG)

 benchmark: test
 	$(GO) test $(GO_TEST_FLAGS) -test.bench='Benchmark' ./...
@@ -59,15 +57,15 @@ clean:
 	$(MAKE) -C tools clean
 	$(MAKE) -C web clean
 	rm -rf $(TEST_ARTIFACTS)
-	-rm prometheus.tar.gz
+	-rm $(ARCHIVE)
-	-find . -type f -iname '*~' -exec rm '{}' ';'
+	-find . -type f -name '*~' -exec rm '{}' ';'
-	-find . -type f -iname '*#' -exec rm '{}' ';'
+	-find . -type f -name '*#' -exec rm '{}' ';'
-	-find . -type f -iname '.#*' -exec rm '{}' ';'
+	-find . -type f -name '.#*' -exec rm '{}' ';'

-config: dependencies preparation
+config: dependencies
 	$(MAKE) -C config

-dependencies: preparation
+dependencies: $(GOCC) $(FULL_GOPATH)
 	$(GO) get -d

 documentation: search_index
@@ -76,14 +74,8 @@ documentation: search_index
 format:
 	find . -iname '*.go' | egrep -v "^\./\.build|./generated|\.(l|y)\.go" | xargs -n1 $(GOFMT) -w -s=true

-model: dependencies preparation
-	$(MAKE) -C model
-
-preparation: $(GOCC) $(FULL_GOPATH)
-	$(MAKE) -C $(BUILD_PATH)
-
 race_condition_binary: build
-	CGO_CFLAGS="-I$(BUILD_PATH)/root/include" CGO_LDFLAGS="-L$(BUILD_PATH)/root/lib" $(GO) build -race -o prometheus.race $(BUILDFLAGS) .
+	$(GO) build -race -o prometheus.race $(BUILDFLAGS) .

 race_condition_run: race_condition_binary
 	./prometheus.race $(ARGUMENTS)
@@ -94,7 +86,7 @@ run: binary
 search_index:
 	godoc -index -write_index -index_files='search_index'

-server: config dependencies model preparation
+server: config dependencies
 	$(MAKE) -C server

 # $(FULL_GOPATH) is responsible for ensuring that the builder has not done anything
@@ -103,16 +95,13 @@ $(FULL_GOPATH):
 	-[ -d "$(FULL_GOPATH)" ] || { mkdir -vp $(FULL_GOPATH_BASE) ; ln -s "$(PWD)" "$(FULL_GOPATH)" ; }
 	[ -d "$(FULL_GOPATH)" ]

-test: config dependencies model preparation tools web
+test: config dependencies tools web
 	$(GO) test $(GO_TEST_FLAGS) ./...

-tools: dependencies preparation
+tools: dependencies
 	$(MAKE) -C tools

-update:
-	$(GO) get -d
-
-web: config dependencies model preparation
+web: config dependencies
 	$(MAKE) -C web

-.PHONY: advice binary build clean config dependencies documentation format model preparation race_condition_binary race_condition_run release run search_index tag tarball test tools update
+.PHONY: advice binary build clean config dependencies documentation format race_condition_binary race_condition_run release run search_index tag tarball test tools
@@ -15,15 +15,7 @@
 .SUFFIXES:

-# Set this to "false" to provide verbose builds of third-party components,
-# namely C and C++ dependencies.
-export SILENCE_THIRD_PARTY_BUILDS := true
-
-ifeq ($(SILENCE_THIRD_PARTY_BUILDS), true)
-export THIRD_PARTY_BUILD_OUTPUT := >/dev/null 2>&1
-else
-export THIRD_PARTY_BUILD_OUTPUT :=
-endif
+VERSION=0.8.0

 OS=$(shell uname)
 ARCH=$(shell uname -m)
@@ -34,7 +26,7 @@ MAC_OS_X_VERSION ?= 10.8

 BUILD_PATH = $(PWD)/.build

-GO_VERSION := 1.3
+GO_VERSION := 1.3.3
 GOOS = $(subst Darwin,darwin,$(subst Linux,linux,$(OS)))

 ifeq ($(GOOS),darwin)
@@ -54,42 +46,18 @@ GOENV = TMPDIR=$(TMPDIR) GOROOT=$(GOROOT) GOPATH=$(GOPATH)
 GO = $(GOENV) $(GOCC)
 GOFMT = $(GOROOT)/bin/gofmt

-LEVELDB_VERSION := 1.14.0
-PROTOCOL_BUFFERS_VERSION := 2.5.0
-SNAPPY_VERSION := 1.1.0
-
 UNAME := $(shell uname)
 FULL_GOPATH := $(GOPATH)/src/github.com/prometheus/prometheus
 FULL_GOPATH_BASE := $(GOPATH)/src/github.com/prometheus

 export PREFIX=$(BUILD_PATH)/root

-export LOCAL_BINARIES=$(PREFIX)/bin
-
-export PATH := $(LOCAL_BINARIES):$(GOPATH)/bin:$(PATH)
-export LD_LIBRARY_PATH := $(PREFIX)/lib:$(LD_LIBRARY_PATH)
-
-export CFLAGS := $(CFLAGS) -I$(PREFIX)/include -O3
-export CXXFLAGS := $(CXXFLAGS) -I$(PREFIX)/include -O3
-export CPPFLAGS := $(CPPFLAGS) -I$(PREFIX)/include -O3
-export LDFLAGS := $(LDFLAGS) -L$(PREFIX)/lib
-export PKG_CONFIG_PATH := $(PREFIX)/lib/pkgconfig:$(PKG_CONFIG_PATH)
-
-export CGO_CFLAGS = $(CFLAGS)
-export CGO_LDFLAGS = $(LDFLAGS)
-
-export GO_TEST_FLAGS ?= "-v"
+export PATH := $(GOPATH)/bin:$(PATH)
+export GO_TEST_FLAGS ?= "-v -short"

 GO_GET := $(GO) get -u -v -x
-APT_GET_INSTALL := sudo apt-get install -y
-BREW_INSTALL := brew install
-# By default, wget sets the creation time to match the server's, which throws
-# off Make. :-(
-#
-# Set WGET_OPTIONS to include ``--no-use-server-timestamps`` to alleviate this.
-WGET := wget $(WGET_OPTIONS) -c
-
-VERSION := $(shell cat VERSION)
 REV := $(shell git rev-parse --short HEAD)
 BRANCH := $(shell git rev-parse --abbrev-ref HEAD)
 HOSTNAME := $(shell hostname -f)
@@ -100,11 +68,8 @@ BUILDFLAGS := -ldflags \
 	-X main.buildBranch $(BRANCH)\
 	-X main.buildUser $(USER)@$(HOSTNAME)\
 	-X main.buildDate $(BUILD_DATE)\
-	-X main.goVersion $(GO_VERSION)\
-	-X main.leveldbVersion $(LEVELDB_VERSION)\
-	-X main.protobufVersion $(PROTOCOL_BUFFERS_VERSION)\
-	-X main.snappyVersion $(SNAPPY_VERSION)"
-
-PROTOC := $(LOCAL_BINARIES)/protoc
-
+	-X main.goVersion $(GO_VERSION)"
+PROTOC := protoc
+CURL := curl

 ARCHIVE := prometheus-$(VERSION).$(GOOS)-$(GOARCH).tar.gz
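The trimmed BUILDFLAGS above still inject build metadata at link time, only without the LevelDB/protobuf/Snappy versions. Spelled out with illustrative values (the diff pins VERSION=0.8.0 and GO_VERSION := 1.3.3; the space-separated `-X name value` form is the pre-Go-1.5 linker syntax used here):

    go build -ldflags "-X main.buildVersion 0.8.0 -X main.goVersion 1.3.3" -o prometheus .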
README.md (19 changed lines)
@@ -14,17 +14,16 @@ intervals, evaluate rule expressions, display the results, and trigger an
 action if some condition is observed to be true.

 ## Prerequisites
-If you read below in the _Getting Started_ section, the build infrastructure
-will take care of the following things for you in most cases:
-
-1. Go 1.1.
-2. LevelDB: [https://code.google.com/p/leveldb/](https://code.google.com/p/leveldb/).
-3. Protocol Buffers Compiler: [http://code.google.com/p/protobuf/](http://code.google.com/p/protobuf/).
-4. goprotobuf: the code generator and runtime library: [http://code.google.com/p/goprotobuf/](http://code.google.com/p/goprotobuf/).
-5. Levigo, a Go-wrapper around LevelDB's C library: [https://github.com/jmhodges/levigo](https://github.com/jmhodges/levigo).
-6. GoRest, a RESTful style web-services framework: [http://code.google.com/p/gorest/](http://code.google.com/p/gorest/).
-7. Prometheus Client, Prometheus in Prometheus [https://github.com/prometheus/client_golang](https://github.com/prometheus/client_golang).
-8. Snappy, a compression library for LevelDB and Levigo [http://code.google.com/p/snappy/](http://code.google.com/p/snappy/).
+In your `PATH`, you must have the following binaries available:
+- `curl`
+- `xxd`
+- `sed`
+- `gzip`
+
+If you change any of the `*.proto` files, you need to install [`protoc`, the protobuf compiler](http://code.google.com/p/protobuf/), v2.5.0 or higher.
+
+TODO: lexer, golex

 ## Getting Started
@@ -19,29 +19,23 @@ import (

 // Build information. Populated by Makefile.
 var (
 	buildVersion  string
 	buildRevision string
 	buildBranch   string
 	buildUser     string
 	buildDate     string
 	goVersion     string
-	leveldbVersion  string
-	protobufVersion string
-	snappyVersion   string
 )

 // BuildInfo encapsulates compile-time metadata about Prometheus made available
 // via go tool ld such that this can be reported on-demand.
 var BuildInfo = map[string]string{
 	"version":    buildVersion,
 	"revision":   buildRevision,
 	"branch":     buildBranch,
 	"user":       buildUser,
 	"date":       buildDate,
 	"go_version": goVersion,
-	"leveldb_version":  leveldbVersion,
-	"protobuf_version": protobufVersion,
-	"snappy_version":   snappyVersion,
 }

 var versionInfoTmpl = template.Must(template.New("version").Parse(
@@ -49,7 +43,4 @@ var versionInfoTmpl = template.Must(template.New("version").Parse(
   build user:       {{.user}}
   build date:       {{.date}}
   go version:       {{.go_version}}
-  leveldb version:  {{.leveldb_version}}
-  protobuf version: {{.protobuf_version}}
-  snappy version:   {{.snappy_version}}
 `))
@@ -17,12 +17,6 @@

 include ../Makefile.INCLUDE

-# In order to build the generated targets in this directory, run the
-# following:
-#
-#    make -C build goprotobuf-protoc-gen-go-stamp
-
 generated/config.pb.go: config.proto
-	$(MAKE) -C ../.build goprotobuf-protoc-gen-go-stamp
+	$(GO_GET) code.google.com/p/goprotobuf/protoc-gen-go
 	$(PROTOC) --proto_path=$(PREFIX)/include:. --go_out=generated/ config.proto
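Regenerating the config bindings therefore no longer goes through the `.build` stamp machinery; done by hand it is roughly the following sketch, assuming `protoc` 2.5.0+ and `$GOPATH/bin` on the PATH (the Makefile rule above additionally passes `$(PREFIX)/include` on `--proto_path`):

    go get -u code.google.com/p/goprotobuf/protoc-gen-go
    protoc --proto_path=. --go_out=generated/ config.proto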
@@ -1,31 +0,0 @@
# Copyright 2013 Prometheus Team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

all: generated/data.pb.go generated/descriptor.blob

SUFFIXES:

include ../Makefile.INCLUDE

# In order to build the generated targets in this directory, run the
# following:
#
#    make -C .build goprotobuf-protoc-gen-go-stamp

generated/data.pb.go: data.proto
	$(MAKE) -C ../.build goprotobuf-protoc-gen-go-stamp
	$(PROTOC) --proto_path=$(PREFIX)/include:. --include_imports --go_out=generated/ --descriptor_set_out=generated/descriptor.blob data.proto

generated/descriptor.blob: data.proto
	$(MAKE) -C ../.build goprotobuf-protoc-gen-go-stamp
	$(PROTOC) --proto_path=$(PREFIX)/include:. --include_imports --go_out=generated/ --descriptor_set_out=generated/descriptor.blob data.proto

model/data.proto (deleted, 125 lines)
@@ -1,125 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package io.prometheus;

import "google/protobuf/descriptor.proto";

message LabelPair {
  optional string name = 1;
  optional string value = 2;
}

message LabelName {
  optional string name = 1;
}

message LabelValueCollection {
  repeated string member = 1;
}

message Metric {
  repeated LabelPair label_pair = 1;
}

message Fingerprint {
  optional string signature = 1;
}

message FingerprintCollection {
  repeated Fingerprint member = 1;
}

message LabelSet {
  repeated LabelPair member = 1;
}

// The default LevelDB comparator sorts not only lexicographically, but also by
// key length (which takes precedence). Thus, no variable-length fields may be
// introduced into the key definition below.
message SampleKey {
  optional Fingerprint fingerprint = 1;
  optional bytes timestamp = 2;
  optional sfixed64 last_timestamp = 3;
  optional fixed32 sample_count = 4;
}

message MembershipIndexValue {
}

message MetricHighWatermark {
  optional int64 timestamp = 1;
}

// CompactionProcessorDefinition models a curation process across the sample
// corpus that ensures that sparse samples.
message CompactionProcessorDefinition {
  // minimum_group_size identifies how minimally samples should be grouped
  // together to write a new samples chunk.
  optional uint32 minimum_group_size = 1;
}

// CurationKey models the state of curation for a given metric fingerprint and
// its associated samples. The time series database only knows about compaction
// and resampling behaviors that are explicitly defined to it in its runtime
// configuration, meaning it never scans on-disk tables for CurationKey
// policies; rather, it looks up via the CurationKey tuple to find out what the
// effectuation state for a given metric fingerprint is.
//
// For instance, how far along as a rule for (Fingerprint A, Samples Older Than
// B, and Curation Processor) has been effectuated on-disk.
message CurationKey {
  // fingerprint identifies the fingerprint for the given policy.
  optional Fingerprint fingerprint = 1;

  // processor_message_type_name identifies the underlying message type that
  // was used to encode processor_message_raw.
  optional string processor_message_type_name = 2;

  // processor_message_raw identifies the serialized ProcessorSignature for this
  // operation.
  optional bytes processor_message_raw = 3;

  // ignore_younger_than represents in seconds relative to when the curation
  // cycle start when the curator should stop operating.  For instance, if
  // the curation cycle starts at time T and the curation remark dictates that
  // the curation should starts processing samples at time S, the curator should
  // work from S until ignore_younger_than seconds before T:
  //
  //   PAST                 NOW                FUTURE
  //
  //   S--------------->|----------T
  //                     |---IYT----|
  //
  //   [Curation Resumption Time (S), T - IYT)
  optional int64 ignore_younger_than = 4;

  // This could be populated by decoding the generated descriptor file into a
  // FileDescriptorSet message and extracting the type definition for the given
  // message schema that describes processor_message_type_name.
  //
  // optional google.protobuf.DescriptorProto processor_message_type_descriptor_raw = 5;
}

// CurationValue models the progress for a given CurationKey.
message CurationValue {
  // last_completion_timestamp represents the seconds since the epoch UTC at
  // which the curator last completed its duty cycle for a given metric
  // fingerprint.
  optional int64 last_completion_timestamp = 1;
}

// DeletionProcessorDefinition models a curation process across the sample
// corpus that deletes old values.
message DeletionProcessorDefinition {
}

@@ -1,344 +0,0 @@
// Code generated by protoc-gen-go.
// source: data.proto
// DO NOT EDIT!

/*
Package io_prometheus is a generated protocol buffer package.

It is generated from these files:
	data.proto

It has these top-level messages:
	LabelPair
	LabelName
	LabelValueCollection
	Metric
	Fingerprint
	FingerprintCollection
	LabelSet
	SampleKey
	MembershipIndexValue
	MetricHighWatermark
	CompactionProcessorDefinition
	CurationKey
	CurationValue
	DeletionProcessorDefinition
*/
package io_prometheus

import proto "code.google.com/p/goprotobuf/proto"
import json "encoding/json"
import math "math"

// discarding unused import google_protobuf "google/protobuf/descriptor.pb"

// Reference proto, json, and math imports to suppress error if they are not otherwise used.
var _ = proto.Marshal
var _ = &json.SyntaxError{}
var _ = math.Inf

type LabelPair struct {
	Name             *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
	Value            *string `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"`
	XXX_unrecognized []byte  `json:"-"`
}

func (m *LabelPair) Reset()         { *m = LabelPair{} }
func (m *LabelPair) String() string { return proto.CompactTextString(m) }
func (*LabelPair) ProtoMessage()    {}

func (m *LabelPair) GetName() string {
	if m != nil && m.Name != nil {
		return *m.Name
	}
	return ""
}

func (m *LabelPair) GetValue() string {
	if m != nil && m.Value != nil {
		return *m.Value
	}
	return ""
}

type LabelName struct {
	Name             *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
	XXX_unrecognized []byte  `json:"-"`
}

func (m *LabelName) Reset()         { *m = LabelName{} }
func (m *LabelName) String() string { return proto.CompactTextString(m) }
func (*LabelName) ProtoMessage()    {}

func (m *LabelName) GetName() string {
	if m != nil && m.Name != nil {
		return *m.Name
	}
	return ""
}

type LabelValueCollection struct {
	Member           []string `protobuf:"bytes,1,rep,name=member" json:"member,omitempty"`
	XXX_unrecognized []byte   `json:"-"`
}

func (m *LabelValueCollection) Reset()         { *m = LabelValueCollection{} }
func (m *LabelValueCollection) String() string { return proto.CompactTextString(m) }
func (*LabelValueCollection) ProtoMessage()    {}

func (m *LabelValueCollection) GetMember() []string {
	if m != nil {
		return m.Member
	}
	return nil
}

type Metric struct {
	LabelPair        []*LabelPair `protobuf:"bytes,1,rep,name=label_pair" json:"label_pair,omitempty"`
	XXX_unrecognized []byte       `json:"-"`
}

func (m *Metric) Reset()         { *m = Metric{} }
func (m *Metric) String() string { return proto.CompactTextString(m) }
func (*Metric) ProtoMessage()    {}

func (m *Metric) GetLabelPair() []*LabelPair {
	if m != nil {
		return m.LabelPair
	}
	return nil
}

type Fingerprint struct {
	Signature        *string `protobuf:"bytes,1,opt,name=signature" json:"signature,omitempty"`
	XXX_unrecognized []byte  `json:"-"`
}

func (m *Fingerprint) Reset()         { *m = Fingerprint{} }
func (m *Fingerprint) String() string { return proto.CompactTextString(m) }
func (*Fingerprint) ProtoMessage()    {}

func (m *Fingerprint) GetSignature() string {
	if m != nil && m.Signature != nil {
		return *m.Signature
	}
	return ""
}

type FingerprintCollection struct {
	Member           []*Fingerprint `protobuf:"bytes,1,rep,name=member" json:"member,omitempty"`
	XXX_unrecognized []byte         `json:"-"`
}

func (m *FingerprintCollection) Reset()         { *m = FingerprintCollection{} }
func (m *FingerprintCollection) String() string { return proto.CompactTextString(m) }
func (*FingerprintCollection) ProtoMessage()    {}

func (m *FingerprintCollection) GetMember() []*Fingerprint {
	if m != nil {
		return m.Member
	}
	return nil
}

type LabelSet struct {
	Member           []*LabelPair `protobuf:"bytes,1,rep,name=member" json:"member,omitempty"`
	XXX_unrecognized []byte       `json:"-"`
}

func (m *LabelSet) Reset()         { *m = LabelSet{} }
func (m *LabelSet) String() string { return proto.CompactTextString(m) }
func (*LabelSet) ProtoMessage()    {}

func (m *LabelSet) GetMember() []*LabelPair {
	if m != nil {
		return m.Member
	}
	return nil
}

// The default LevelDB comparator sorts not only lexicographically, but also by
// key length (which takes precedence). Thus, no variable-length fields may be
// introduced into the key definition below.
type SampleKey struct {
	Fingerprint      *Fingerprint `protobuf:"bytes,1,opt,name=fingerprint" json:"fingerprint,omitempty"`
	Timestamp        []byte       `protobuf:"bytes,2,opt,name=timestamp" json:"timestamp,omitempty"`
	LastTimestamp    *int64       `protobuf:"fixed64,3,opt,name=last_timestamp" json:"last_timestamp,omitempty"`
	SampleCount      *uint32      `protobuf:"fixed32,4,opt,name=sample_count" json:"sample_count,omitempty"`
	XXX_unrecognized []byte       `json:"-"`
}

func (m *SampleKey) Reset()         { *m = SampleKey{} }
func (m *SampleKey) String() string { return proto.CompactTextString(m) }
func (*SampleKey) ProtoMessage()    {}

func (m *SampleKey) GetFingerprint() *Fingerprint {
	if m != nil {
		return m.Fingerprint
	}
	return nil
}

func (m *SampleKey) GetTimestamp() []byte {
	if m != nil {
		return m.Timestamp
	}
	return nil
}

func (m *SampleKey) GetLastTimestamp() int64 {
	if m != nil && m.LastTimestamp != nil {
		return *m.LastTimestamp
	}
	return 0
}

func (m *SampleKey) GetSampleCount() uint32 {
	if m != nil && m.SampleCount != nil {
		return *m.SampleCount
	}
	return 0
}

type MembershipIndexValue struct {
	XXX_unrecognized []byte `json:"-"`
}

func (m *MembershipIndexValue) Reset()         { *m = MembershipIndexValue{} }
func (m *MembershipIndexValue) String() string { return proto.CompactTextString(m) }
func (*MembershipIndexValue) ProtoMessage()    {}

type MetricHighWatermark struct {
	Timestamp        *int64 `protobuf:"varint,1,opt,name=timestamp" json:"timestamp,omitempty"`
	XXX_unrecognized []byte `json:"-"`
}

func (m *MetricHighWatermark) Reset()         { *m = MetricHighWatermark{} }
func (m *MetricHighWatermark) String() string { return proto.CompactTextString(m) }
func (*MetricHighWatermark) ProtoMessage()    {}

func (m *MetricHighWatermark) GetTimestamp() int64 {
	if m != nil && m.Timestamp != nil {
		return *m.Timestamp
	}
	return 0
}

// CompactionProcessorDefinition models a curation process across the sample
// corpus that ensures that sparse samples.
type CompactionProcessorDefinition struct {
	// minimum_group_size identifies how minimally samples should be grouped
	// together to write a new samples chunk.
	MinimumGroupSize *uint32 `protobuf:"varint,1,opt,name=minimum_group_size" json:"minimum_group_size,omitempty"`
	XXX_unrecognized []byte  `json:"-"`
}

func (m *CompactionProcessorDefinition) Reset()         { *m = CompactionProcessorDefinition{} }
func (m *CompactionProcessorDefinition) String() string { return proto.CompactTextString(m) }
func (*CompactionProcessorDefinition) ProtoMessage()    {}

func (m *CompactionProcessorDefinition) GetMinimumGroupSize() uint32 {
	if m != nil && m.MinimumGroupSize != nil {
		return *m.MinimumGroupSize
	}
	return 0
}

// CurationKey models the state of curation for a given metric fingerprint and
// its associated samples. The time series database only knows about compaction
// and resampling behaviors that are explicitly defined to it in its runtime
// configuration, meaning it never scans on-disk tables for CurationKey
// policies; rather, it looks up via the CurationKey tuple to find out what the
// effectuation state for a given metric fingerprint is.
//
// For instance, how far along as a rule for (Fingerprint A, Samples Older Than
// B, and Curation Processor) has been effectuated on-disk.
type CurationKey struct {
	// fingerprint identifies the fingerprint for the given policy.
	Fingerprint *Fingerprint `protobuf:"bytes,1,opt,name=fingerprint" json:"fingerprint,omitempty"`
	// processor_message_type_name identifies the underlying message type that
	// was used to encode processor_message_raw.
	ProcessorMessageTypeName *string `protobuf:"bytes,2,opt,name=processor_message_type_name" json:"processor_message_type_name,omitempty"`
	// processor_message_raw identifies the serialized ProcessorSignature for this
	// operation.
	ProcessorMessageRaw []byte `protobuf:"bytes,3,opt,name=processor_message_raw" json:"processor_message_raw,omitempty"`
	// ignore_younger_than represents in seconds relative to when the curation
	// cycle start when the curator should stop operating.  For instance, if
	// the curation cycle starts at time T and the curation remark dictates that
	// the curation should starts processing samples at time S, the curator should
	// work from S until ignore_younger_than seconds before T:
	//
	//   PAST                 NOW                FUTURE
	//
	//   S--------------->|----------T
	//                     |---IYT----|
	//
	//   [Curation Resumption Time (S), T - IYT)
	IgnoreYoungerThan *int64 `protobuf:"varint,4,opt,name=ignore_younger_than" json:"ignore_younger_than,omitempty"`
	XXX_unrecognized  []byte `json:"-"`
}

func (m *CurationKey) Reset()         { *m = CurationKey{} }
func (m *CurationKey) String() string { return proto.CompactTextString(m) }
func (*CurationKey) ProtoMessage()    {}

func (m *CurationKey) GetFingerprint() *Fingerprint {
	if m != nil {
		return m.Fingerprint
	}
	return nil
}

func (m *CurationKey) GetProcessorMessageTypeName() string {
	if m != nil && m.ProcessorMessageTypeName != nil {
		return *m.ProcessorMessageTypeName
	}
	return ""
}

func (m *CurationKey) GetProcessorMessageRaw() []byte {
	if m != nil {
		return m.ProcessorMessageRaw
	}
	return nil
}

func (m *CurationKey) GetIgnoreYoungerThan() int64 {
	if m != nil && m.IgnoreYoungerThan != nil {
		return *m.IgnoreYoungerThan
	}
	return 0
}

// CurationValue models the progress for a given CurationKey.
type CurationValue struct {
	// last_completion_timestamp represents the seconds since the epoch UTC at
	// which the curator last completed its duty cycle for a given metric
	// fingerprint.
	LastCompletionTimestamp *int64 `protobuf:"varint,1,opt,name=last_completion_timestamp" json:"last_completion_timestamp,omitempty"`
	XXX_unrecognized        []byte `json:"-"`
}

func (m *CurationValue) Reset()         { *m = CurationValue{} }
func (m *CurationValue) String() string { return proto.CompactTextString(m) }
func (*CurationValue) ProtoMessage()    {}

func (m *CurationValue) GetLastCompletionTimestamp() int64 {
	if m != nil && m.LastCompletionTimestamp != nil {
		return *m.LastCompletionTimestamp
	}
	return 0
}

// DeletionProcessorDefinition models a curation process across the sample
// corpus that deletes old values.
type DeletionProcessorDefinition struct {
	XXX_unrecognized []byte `json:"-"`
}

func (m *DeletionProcessorDefinition) Reset()         { *m = DeletionProcessorDefinition{} }
func (m *DeletionProcessorDefinition) String() string { return proto.CompactTextString(m) }
func (*DeletionProcessorDefinition) ProtoMessage()    {}

func init() {
}
Binary file not shown.

@@ -14,10 +14,11 @@
 all: parser.y.go lexer.l.go

 parser.y.go: parser.y
-	go tool yacc -o parser.y.go -v "" parser.y
+	$(GOCC) tool yacc -o parser.y.go -v "" parser.y

 lexer.l.go: parser.y.go lexer.l
 	# This is golex from https://github.com/cznic/golex.
+	$(GO_GET) github.com/cznic/golex
 	golex -o="lexer.l.go" lexer.l

 clean:

@@ -11,20 +11,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-all: dumper rule_checker
+all: rule_checker

 SUFFIXES:

 include ../Makefile.INCLUDE

-dumper:
-	$(MAKE) -C dumper
-
 rule_checker:
 	$(MAKE) -C rule_checker

 clean:
-	$(MAKE) -C dumper clean
 	$(MAKE) -C rule_checker clean

-.PHONY: clean dumper rule_checker
+.PHONY: clean rule_checker

tools/dumper/.gitignore (deleted)
@@ -1 +0,0 @@
dumper

@@ -1,28 +0,0 @@
# Copyright 2013 Prometheus Team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

MAKE_ARTIFACTS = dumper

all: dumper

SUFFIXES:

include ../../Makefile.INCLUDE

dumper: $(shell find . -iname '*.go')
	$(GO) build -o dumper .

clean:
	rm -rf $(MAKE_ARTIFACTS)

.PHONY: clean

@@ -1,105 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Dumper is responsible for dumping all samples along with metadata contained
// in a given Prometheus metrics storage. It prints samples in unquoted CSV
// format, with commas as field separators:
//
// <fingerprint>,<chunk_first_time>,<chunk_last_time>,<chunk_sample_count>,<chunk_index>,<timestamp>,<value>
package main

/*
import (
	"encoding/csv"
	"flag"
	"fmt"
	"os"
	"strconv"

	"github.com/golang/glog"

	"github.com/prometheus/prometheus/storage"
	"github.com/prometheus/prometheus/storage/metric"
	"github.com/prometheus/prometheus/storage/metric/tiered"
)

var (
	storageRoot   = flag.String("storage.root", "", "The path to the storage root for Prometheus.")
	dieOnBadChunk = flag.Bool("dieOnBadChunk", false, "Whether to die upon encountering a bad chunk.")
)

type SamplesDumper struct {
	*csv.Writer
}

func (d *SamplesDumper) Operate(key, value interface{}) *storage.OperatorError {
	sampleKey := key.(*tiered.SampleKey)
	if *dieOnBadChunk && sampleKey.FirstTimestamp.After(sampleKey.LastTimestamp) {
		glog.Fatalf("Chunk: First time (%v) after last time (%v): %v\n", sampleKey.FirstTimestamp.Unix(), sampleKey.LastTimestamp.Unix(), sampleKey)
	}
	for i, sample := range value.(metric.Values) {
		if *dieOnBadChunk && (sample.Timestamp.Before(sampleKey.FirstTimestamp) || sample.Timestamp.After(sampleKey.LastTimestamp)) {
			glog.Fatalf("Sample not within chunk boundaries: chunk FirstTimestamp (%v), chunk LastTimestamp (%v) vs. sample Timestamp (%v)\n", sampleKey.FirstTimestamp.Unix(), sampleKey.LastTimestamp.Unix(), sample.Timestamp)
		}
		d.Write([]string{
			sampleKey.Fingerprint.String(),
			strconv.FormatInt(sampleKey.FirstTimestamp.Unix(), 10),
			strconv.FormatInt(sampleKey.LastTimestamp.Unix(), 10),
			strconv.FormatUint(uint64(sampleKey.SampleCount), 10),
			strconv.Itoa(i),
			strconv.FormatInt(sample.Timestamp.Unix(), 10),
			fmt.Sprintf("%v", sample.Value),
		})
		if err := d.Error(); err != nil {
			return &storage.OperatorError{
				Error:       err,
				Continuable: false,
			}
		}
	}
	return nil
}

func main() {
	flag.Parse()

	if storageRoot == nil || *storageRoot == "" {
		glog.Fatal("Must provide a path...")
	}

	persistence, err := tiered.NewLevelDBPersistence(*storageRoot)
	if err != nil {
		glog.Fatal(err)
	}
	defer persistence.Close()

	dumper := &SamplesDumper{
		csv.NewWriter(os.Stdout),
	}

	entire, err := persistence.MetricSamples.ForEach(&tiered.MetricSamplesDecoder{}, &tiered.AcceptAllFilter{}, dumper)
	if err != nil {
		glog.Fatal("Error dumping samples: ", err)
	}
	if !entire {
		glog.Fatal("Didn't scan entire corpus")
	}
	dumper.Flush()
	if err = dumper.Error(); err != nil {
		glog.Fatal("Error flushing CSV: ", err)
	}
}
*/

func main() {
}
@@ -13,7 +13,7 @@

 all: blob-stamp

-blob-stamp: static/generated/protocol_buffer.descriptor templates/*
+blob-stamp: templates/*
 	$(MAKE) -C blob
 	touch $@

@@ -21,10 +21,4 @@ clean:
 	$(MAKE) -C blob clean
 	-rm -f *-stamp

-static/generated:
-	mkdir -vp static/generated
-
-static/generated/protocol_buffer.descriptor: static/generated ../model/generated/descriptor.blob
-	cp -f ../model/generated/descriptor.blob $@
-
 .PHONY: clean
@@ -22,6 +22,5 @@ files.go: $(shell find ../templates/ ../static/ -type f)

 clean:
 	-rm files.go
-	-rm -rf generated

 .PHONY: clean