Skip to content

Commit

Permalink
Update
Browse files Browse the repository at this point in the history
Signed-off-by: Yi Chen <[email protected]>
  • Loading branch information
ChenYi015 committed Jul 1, 2024
1 parent 3bd806b commit 21cbf65
Show file tree
Hide file tree
Showing 179 changed files with 7,642 additions and 7,489 deletions.
5 changes: 2 additions & 3 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,11 +1,10 @@
bin/
vendor/
spark-operator
**/*.iml
cover.out
sparkctl/sparkctl
spark-on-k8s-operator
sparkctl/sparkctl-linux-amd64
sparkctl/sparkctl-darwin-amd64
**/*.iml

# Various IDEs
.idea/
Expand Down
26 changes: 26 additions & 0 deletions .golangci.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
run:
deadline: 5m

linters:
enable:
- revive
- gci
- depguard
- godot
- testifylint
- unconvert

issues:
exclude-rules:
# Disable errcheck linter for test files.
- path: _test.go
linters:
- errcheck

linters-settings:
gci:
sections:
- standard
- default
- prefix(github.com/kubeflow/spark-operator)

5 changes: 3 additions & 2 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -28,12 +28,13 @@ COPY go.sum go.sum
RUN go mod download

# Copy the go source code
COPY main.go main.go
COPY api/ api/
COPY cmd/ cmd/
COPY pkg/ pkg/

# Build
ARG TARGETARCH
RUN CGO_ENABLED=0 GOOS=linux GOARCH=${TARGETARCH} GO111MODULE=on go build -a -o /usr/bin/spark-operator main.go
RUN CGO_ENABLED=0 GOOS=linux GOARCH=${TARGETARCH} GO111MODULE=on go build -a -o /usr/bin/spark-operator cmd/main.go

FROM ${SPARK_IMAGE}
USER root
Expand Down
74 changes: 51 additions & 23 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -17,19 +17,27 @@ DEP_VERSION:=`grep DEP_VERSION= Dockerfile | awk -F\" '{print $$2}'`
BUILDER=`grep "FROM golang:" Dockerfile | awk '{print $$2}'`
UNAME:=`uname | tr '[:upper:]' '[:lower:]'`
REPO=github.com/kubeflow/spark-operator
SPARK_OPERATOR_CHART_PATH=charts/spark-operator-chart

# CONTAINER_TOOL defines the container tool to be used for building images.
# Be aware that the target commands are only tested with Docker which is
# scaffolded by default. However, you might want to replace it to use other
# tools. (i.e. podman)
CONTAINER_TOOL ?= docker

OPERATOR_VERSION ?= $$(grep appVersion $(SPARK_OPERATOR_CHART_PATH)/Chart.yaml | awk '{print $$2}')
# Image URL to use all building/pushing image targets
IMAGE_REPOSITORY ?= docker.io/kubeflow/spark-operator
IMAGE_TAG ?= latest
IMAGE_TAG ?= $(OPERATOR_VERSION)
OPERATOR_IMAGE ?= $(IMAGE_REPOSITORY):$(IMAGE_TAG)

# Kind cluster
KIND_CLUSTER_NAME ?= spark-operator
KIND_CONFIG_FILE ?= charts/spark-operator-chart/ci/kind-config.yaml
KIND_KUBE_CONFIG ?= $(HOME)/.kube/config

# ENVTEST_K8S_VERSION refers to the version of kubebuilder assets to be downloaded by envtest binary.
ENVTEST_K8S_VERSION = 1.30.0
ENVTEST_K8S_VERSION = 1.29.3

##@ General

Expand All @@ -46,7 +54,7 @@ ENVTEST_K8S_VERSION = 1.30.0

.PHONY: help
help: ## Display this help.
@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " \033[36m%-30s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)

##@ Development

Expand Down Expand Up @@ -76,14 +84,22 @@ go-fmt: ## Run go fmt against code
go-vet: ## Run go vet against code.
go vet ./...

.PHONY: test
test: manifests generate go-fmt go-vet envtest ## Run go tests.
KUBEBUILDER_ASSETS="$(shell $(ENVTEST) use $(ENVTEST_K8S_VERSION) --bin-dir $(LOCALBIN) -p path)" go test $$(go list ./... | grep -v /e2e) -coverprofile cover.out
.PHONY: unit-test
unit-test: manifests generate go-fmt go-vet envtest ## Run unit tests.
@echo "Running unit tests..."
KUBEBUILDER_ASSETS="$(shell $(ENVTEST) use $(ENVTEST_K8S_VERSION) --bin-dir $(LOCALBIN) -p path)"
go test $$(go list ./... | grep -v /e2e) -coverprofile cover.out

# Utilize Kind or modify the e2e tests to load the image locally, enabling compatibility with other vendors.
.PHONY: test-e2e # Run the e2e tests against a Kind k8s instance that is spun up.
test-e2e:
go test ./test/e2e/ -v -ginkgo.v
.PHONY: e2e-test # Run the e2e tests against a Kind k8s instance that is spun up.
e2e-test: envtest kind-create-cluster
@echo "Running e2e tests..."
go test ./test/e2e/ -v -ginkgo.v -timeout 30m

.PHONY: integration-test
integration-test: clean ## Run integration tests
@echo "Running integration tests..."
go test -v ./test/e2e/ --kubeconfig "$(HOME)/.kube/config" --operator-image=gcr.io/spark-operator/spark-operator:local

.PHONY: lint
lint: golangci-lint ## Run golangci-lint linter
Expand All @@ -95,7 +111,7 @@ lint-fix: golangci-lint ## Run golangci-lint linter and perform fixes

.PHONY: fmt-check
fmt-check: clean ## Run go fmt against code
@echo "running fmt check"; cd "$(dirname $0)"; \
@echo "Running fmt check"; cd "$(dirname $0)"; \
if [ -n "$(go fmt ./...)" ]; \
then \
echo "Go code is not formatted, please run 'go fmt ./...'." >&2; \
Expand All @@ -108,16 +124,6 @@ fmt-check: clean ## Run go fmt against code
detect-crds-drift:
diff -q charts/spark-operator-chart/crds config/crd/bases

.PHONY: unit-test
unit-test: clean ## Run unit tests
@echo "running unit tests"
go test -v ./... -covermode=atomic

.PHONY: integration-test
integration-test: clean ## Run integration tests
@echo "running integration tests"
go test -v ./test/e2e/ --kubeconfig "$(HOME)/.kube/config" --operator-image=gcr.io/spark-operator/spark-operator:local

.PHONY: static-analysis
static-analysis:
@echo "running go vet"
Expand Down Expand Up @@ -150,8 +156,8 @@ build-sparkctl: ## Build sparkctl binary
docker run -w $(SPARK_OPERATOR_GOPATH) \
-v $$(pwd):$(SPARK_OPERATOR_GOPATH) $(BUILDER) sh -c \
"apk add --no-cache bash git && \
cd sparkctl && \
./build.sh" || true
cd bin/sparkctl && \
bash build.sh" || true

.PHONY: install-sparkctl
install-sparkctl: | sparkctl/sparkctl-darwin-amd64 sparkctl/sparkctl-linux-amd64 ## Install sparkctl binary
Expand Down Expand Up @@ -184,7 +190,7 @@ build-api-docs: gen-crd-api-reference-docs
# More info: https://docs.docker.com/develop/develop-images/build_enhancements/
.PHONY: docker-build
docker-build: ## Build docker image with the operator.
$(CONTAINER_TOOL) build -t ${IMAGE_REPOSITORY}:${IMAGE_TAG} .
$(CONTAINER_TOOL) build -t $(OPERATOR_IMAGE) .

.PHONY: docker-push
docker-push: ## Push docker image with the operator.
Expand Down Expand Up @@ -213,6 +219,21 @@ ifndef ignore-not-found
ignore-not-found = false
endif

.PHONY: kind-create-cluster
kind-create-cluster: kind ## Create a kind cluster for integration tests.
if ! $(KIND) get clusters 2>/dev/null | grep -q "^$(KIND_CLUSTER_NAME)$$"; then \
kind create cluster --name $(KIND_CLUSTER_NAME) --config $(KIND_CONFIG_FILE) --kubeconfig $(KIND_KUBE_CONFIG); \
fi

.PHONY: kind-load-image
kind-load-image: kind-create-cluster docker-build ## Load the image into the kind cluster.
kind load docker-image --name $(KIND_CLUSTER_NAME) $(OPERATOR_IMAGE)

.PHONY: kind-delete-cluster
kind-delete-cluster: kind ## Delete the created kind cluster.
$(KIND) delete cluster --name $(KIND_CLUSTER_NAME) && \
rm -f $(KIND_KUBE_CONFIG)

.PHONY: install
install: manifests kustomize ## Install CRDs into the K8s cluster specified in ~/.kube/config.
$(KUSTOMIZE) build config/crd | $(KUBECTL) apply -f -
Expand Down Expand Up @@ -241,13 +262,15 @@ $(LOCALBIN):
KUBECTL ?= kubectl
KUSTOMIZE ?= $(LOCALBIN)/kustomize-$(KUSTOMIZE_VERSION)
CONTROLLER_GEN ?= $(LOCALBIN)/controller-gen-$(CONTROLLER_TOOLS_VERSION)
KIND ?= $(LOCALBIN)/kind-$(KIND_VERSION)
ENVTEST ?= $(LOCALBIN)/setup-envtest-$(ENVTEST_VERSION)
GOLANGCI_LINT = $(LOCALBIN)/golangci-lint-$(GOLANGCI_LINT_VERSION)
GEN_CRD_API_REFERENCE_DOCS ?= $(LOCALBIN)/gen-crd-api-reference-docs-$(GEN_CRD_API_REFERENCE_DOCS_VERSION)

## Tool Versions
KUSTOMIZE_VERSION ?= v5.4.1
CONTROLLER_TOOLS_VERSION ?= v0.15.0
KIND_VERSION ?= v0.23.0
ENVTEST_VERSION ?= release-0.18
GOLANGCI_LINT_VERSION ?= v1.57.2
GEN_CRD_API_REFERENCE_DOCS_VERSION ?= v0.3.0
Expand All @@ -262,6 +285,11 @@ controller-gen: $(CONTROLLER_GEN) ## Download controller-gen locally if necessar
$(CONTROLLER_GEN): $(LOCALBIN)
$(call go-install-tool,$(CONTROLLER_GEN),sigs.k8s.io/controller-tools/cmd/controller-gen,$(CONTROLLER_TOOLS_VERSION))

.PHONY: kind
kind: $(KIND) ## Download kind locally if necessary.
$(KIND): $(LOCALBIN)
$(call go-install-tool,$(KIND),sigs.k8s.io/kind,$(KIND_VERSION))

.PHONY: envtest
envtest: $(ENVTEST) ## Download setup-envtest locally if necessary.
$(ENVTEST): $(LOCALBIN)
Expand Down
57 changes: 37 additions & 20 deletions PROJECT
Original file line number Diff line number Diff line change
Expand Up @@ -4,27 +4,44 @@
# More info: https://book.kubebuilder.io/reference/project-config.html
domain: sparkoperator.k8s.io
layout:
- go.kubebuilder.io/v4
- go.kubebuilder.io/v4
projectName: spark-operator
repo: github.com/kubeflow/spark-operator
resources:
- api:
crdVersion: v1
namespaced: true
controller: true
domain: sparkoperator.k8s.io
kind: SparkApplication
path: github.com/kubeflow/spark-operator/api/v1beta2
version: v1beta2
- api:
crdVersion: v1
namespaced: true
controller: true
domain: sparkoperator.k8s.io
kind: ScheduledSparkApplication
path: github.com/kubeflow/spark-operator/api/v1beta2
version: v1beta2
- domain: sparkoperator.k8s.io
kind: SparkApplication
version: v1beta1
- api:
crdVersion: v1
namespaced: true
controller: true
domain: sparkoperator.k8s.io
kind: SparkApplication
path: github.com/kubeflow/spark-operator/api/v1beta1
version: v1beta1
- api:
crdVersion: v1
namespaced: true
controller: true
domain: sparkoperator.k8s.io
kind: ScheduledSparkApplication
path: github.com/kubeflow/spark-operator/api/v1beta1
version: v1beta1
- api:
crdVersion: v1
namespaced: true
controller: true
domain: sparkoperator.k8s.io
kind: SparkApplication
path: github.com/kubeflow/spark-operator/api/v1beta2
version: v1beta2
webhooks:
defaulting: true
validation: true
webhookVersion: v1
- api:
crdVersion: v1
namespaced: true
controller: true
domain: sparkoperator.k8s.io
kind: ScheduledSparkApplication
path: github.com/kubeflow/spark-operator/api/v1beta2
version: v1beta2
version: "3"
4 changes: 2 additions & 2 deletions api/v1beta1/groupversion_info.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,10 +25,10 @@ import (
)

var (
// GroupVersion is group version used to register these objects
// GroupVersion is group version used to register these objects.
GroupVersion = schema.GroupVersion{Group: "sparkoperator.k8s.io", Version: "v1beta1"}

// SchemeBuilder is used to add go types to the GroupVersionKind scheme
// SchemeBuilder is used to add go types to the GroupVersionKind scheme.
SchemeBuilder = &scheme.Builder{GroupVersion: GroupVersion}

// AddToScheme adds the types in this group-version to the given scheme.
Expand Down
14 changes: 0 additions & 14 deletions api/v1beta1/register.go
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,6 @@ limitations under the License.
package v1beta1

import (
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
)

Expand All @@ -34,15 +32,3 @@ var SchemeGroupVersion = schema.GroupVersion{Group: Group, Version: Version}
func Resource(resource string) schema.GroupResource {
return SchemeGroupVersion.WithResource(resource).GroupResource()
}

// addKnownTypes adds the set of types defined in this package to the supplied scheme.
func addKnownTypes(scheme *runtime.Scheme) error {
scheme.AddKnownTypes(SchemeGroupVersion,
&SparkApplication{},
&SparkApplicationList{},
&ScheduledSparkApplication{},
&ScheduledSparkApplicationList{},
)
metav1.AddToGroupVersion(scheme, SchemeGroupVersion)
return nil
}
2 changes: 1 addition & 1 deletion api/v1beta1/zz_generated.deepcopy.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

10 changes: 7 additions & 3 deletions api/v1beta2/defaults.go
Original file line number Diff line number Diff line change
Expand Up @@ -24,15 +24,19 @@ func SetSparkApplicationDefaults(app *SparkApplication) {
return
}

if app.Spec.Type == "" {
app.Spec.Type = SparkApplicationTypeScala
}

if app.Spec.Mode == "" {
app.Spec.Mode = ClusterMode
app.Spec.Mode = DeployModeCluster
}

if app.Spec.RestartPolicy.Type == "" {
app.Spec.RestartPolicy.Type = Never
app.Spec.RestartPolicy.Type = RestartPolicyNever
}

if app.Spec.RestartPolicy.Type != Never {
if app.Spec.RestartPolicy.Type != RestartPolicyNever {
// Default to 5 sec if the RestartPolicy is OnFailure or Always and these values aren't specified.
if app.Spec.RestartPolicy.OnFailureRetryInterval == nil {
app.Spec.RestartPolicy.OnFailureRetryInterval = new(int64)
Expand Down
Loading

0 comments on commit 21cbf65

Please sign in to comment.