diff --git a/.drone.yml b/.drone.yml
index 3cc00040e1335a695cf8864de1e7c0a4e6f5e940..c9159588ad17516b919003c8ec3f476b2817c85c 100644
--- a/.drone.yml
+++ b/.drone.yml
@@ -186,6 +186,75 @@ volumes:
   host:
     path: /var/run/docker.sock
 
+---
+kind: pipeline
+name: s390x
+
+platform:
+  os: linux
+  arch: amd64
+
+# Hack needed for s390x: https://gist.github.com/colstrom/c2f359f72658aaabb44150ac20b16d7c#gistcomment-3858388
+node:
+  arch: s390x
+
+steps:
+- name: build
+  image: rancher/dapper:v0.5.8
+  commands:
+  - dapper ci
+  volumes:
+  - name: docker
+    path: /var/run/docker.sock
+
+- name: github_binary_release
+  image: rancher/drone-images:github-release-s390x
+  settings:
+    api_key:
+      from_secret: github_token
+    prerelease: true
+    checksum:
+    - sha256
+    checksum_file: CHECKSUMsum-s390x.txt
+    checksum_flatten: true
+    files:
+    - "dist/artifacts/*"
+  when:
+    instance:
+    - drone-publish.rancher.io
+    ref:
+    - refs/head/master
+    - refs/tags/*
+    event:
+    - tag
+
+- name: docker-publish
+  image: rancher/drone-images:docker-s390x
+  volumes:
+  - name: docker
+    path: /var/run/docker.sock
+  settings:
+    dockerfile: package/Dockerfile
+    password:
+      from_secret: docker_password
+    repo: "rancher/local-path-provisioner"
+    tag: "${DRONE_TAG}-s390x"
+    username:
+      from_secret: docker_username
+  when:
+    instance:
+    - drone-publish.rancher.io
+    ref:
+    - refs/head/master
+    - refs/tags/*
+    event:
+    - tag
+
+volumes:
+- name: docker
+  host:
+    path: /var/run/docker.sock
+
 ---
 kind: pipeline
 name: manifest
@@ -206,6 +275,7 @@ steps:
     - linux/amd64
     - linux/arm64
     - linux/arm
+    - linux/s390x
     target: "rancher/local-path-provisioner:${DRONE_TAG}"
     template: "rancher/local-path-provisioner:${DRONE_TAG}-ARCH"
   when:
@@ -221,3 +291,4 @@ depends_on:
 - amd64
 - arm64
 - arm
+- s390x
diff --git a/README.md b/README.md
index c6bb21f74492bcb361d81a6c5288507c81f2daf0..dc3a06e4cd5d191395390d440ac3c3ed3008372e 100644
--- a/README.md
+++ b/README.md
@@ -42,7 +42,7 @@ local-path-provisioner-d744ccf98-xfcbk   1/1     Running   0      7m
 
 Check and follow the provisioner log using:
 ```
-$ kubectl -n local-path-storage logs -f -l app=local-path-provisioner
+kubectl -n local-path-storage logs -f -l app=local-path-provisioner
 ```
 
 ## Usage
diff --git a/deploy/chart/Chart.yaml b/deploy/chart/Chart.yaml
index b9b7873a77514e7af2bf6426ddaa3c3969decb23..80c946762b9cd3a18ff6a1cc1b1b892bf1f3423a 100644
--- a/deploy/chart/Chart.yaml
+++ b/deploy/chart/Chart.yaml
@@ -1,8 +1,8 @@
 apiVersion: v1
 description: Use HostPath for persistent local storage with Kubernetes
 name: local-path-provisioner
-version: 0.0.20
-appVersion: "v0.0.20"
+version: 0.0.21
+appVersion: "v0.0.21"
 keywords:
 - storage
 - hostpath
diff --git a/deploy/chart/README.md b/deploy/chart/README.md
index 8999003011f2c52b934321a43043ad067e03a44b..07f52dbc5f79106a069d58e15fbcdb6ab5a20f1b 100644
--- a/deploy/chart/README.md
+++ b/deploy/chart/README.md
@@ -56,7 +56,7 @@ default values.
 | Parameter | Description | Default |
 | ----------------------------------- | ------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------- |
 | `image.repository` | Local Path Provisioner image name | `rancher/local-path-provisioner` |
-| `image.tag` | Local Path Provisioner image tag | `v0.0.20` |
+| `image.tag` | Local Path Provisioner image tag | `v0.0.21` |
 | `image.pullPolicy` | Image pull policy | `IfNotPresent` |
 | `storageClass.create` | If true, create a `StorageClass` | `true` |
 | `storageClass.provisionerName` | The provisioner name for the storage class | `nil` |
diff --git a/deploy/chart/templates/deployment.yaml b/deploy/chart/templates/deployment.yaml
index ad42ae01b08d2b6c48afce91fddb286b86423403..070781ea3fb1fb0257003f2fcf1dd2922bae4214 100644
--- a/deploy/chart/templates/deployment.yaml
+++ b/deploy/chart/templates/deployment.yaml
@@ -47,6 +47,18 @@ spec:
         {{- end }}
         - --configmap-name
         - {{ .Values.configmap.name }}
+        {{- if .Values.workerThreads }}
+        - --worker-threads
+        - {{ .Values.workerThreads }}
+        {{- end }}
+        {{- if .Values.provisioningRetryCount }}
+        - --provisioning-retry-count
+        - {{ .Values.provisioningRetryCount }}
+        {{- end }}
+        {{- if .Values.deletionRetryCount }}
+        - --deletion-retry-count
+        - {{ .Values.deletionRetryCount }}
+        {{- end }}
         volumeMounts:
         - name: config-volume
           mountPath: /etc/config/
diff --git a/deploy/chart/values.yaml b/deploy/chart/values.yaml
index 043337e46e2f34f87bd41c211223e0e302f6c0a0..c43a7444bea23cda2b856812aa3e581de80adb37 100644
--- a/deploy/chart/values.yaml
+++ b/deploy/chart/values.yaml
@@ -4,7 +4,7 @@ replicaCount: 1
 
 image:
   repository: rancher/local-path-provisioner
-  tag: v0.0.20
+  tag: v0.0.21
   pullPolicy: IfNotPresent
 
 helperImage:
@@ -139,7 +139,11 @@ configmap:
         image: busybox
         imagePullPolicy: IfNotPresent
 
+# Number of provisioner worker threads to call provision/delete simultaneously.
+# workerThreads: 4
 
+# Number of retries of failed volume provisioning. 0 means retry indefinitely.
+# provisioningRetryCount: 15
-
-
 
+# Number of retries of failed volume deletion. 0 means retry indefinitely.
+# deletionRetryCount: 15
diff --git a/deploy/local-path-storage.yaml b/deploy/local-path-storage.yaml
index 9b4f4a4ee16583c950fc993b00fe3b1b67391136..32db7e10ef3f4bdd823267e2370e36f322b15abb 100644
--- a/deploy/local-path-storage.yaml
+++ b/deploy/local-path-storage.yaml
@@ -62,7 +62,7 @@ spec:
       serviceAccountName: local-path-provisioner-service-account
       containers:
       - name: local-path-provisioner
-        image: rancher/local-path-provisioner:v0.0.20
+        image: rancher/local-path-provisioner:v0.0.21
         imagePullPolicy: IfNotPresent
         command:
         - local-path-provisioner
diff --git a/deploy/provisioner.yaml b/deploy/provisioner.yaml
index cdd30c7c00bf92cf1ff36038a47c769b181573a2..17aa6f92998127f27d18a410c3268372dd2d318c 100644
--- a/deploy/provisioner.yaml
+++ b/deploy/provisioner.yaml
@@ -16,7 +16,7 @@ spec:
       serviceAccountName: local-path-provisioner-service-account
      containers:
       - name: local-path-provisioner
-        image: rancher/local-path-provisioner:v0.0.20
+        image: rancher/local-path-provisioner:v0.0.21
         imagePullPolicy: Always
         command:
         - local-path-provisioner
diff --git a/main.go b/main.go
index b80b30ec989a1fd32d8502f8bab6ce1afa765c29..0a54636a440b4c80969f8cf99f447674904d993a 100644
--- a/main.go
+++ b/main.go
@@ -19,26 +19,32 @@ import (
 )
 
 var (
-    VERSION                = "0.0.1"
-    FlagConfigFile         = "config"
-    FlagProvisionerName    = "provisioner-name"
-    EnvProvisionerName     = "PROVISIONER_NAME"
-    DefaultProvisionerName = "rancher.io/local-path"
-    FlagNamespace          = "namespace"
-    EnvNamespace           = "POD_NAMESPACE"
-    DefaultNamespace       = "local-path-storage"
-    FlagHelperImage        = "helper-image"
-    EnvHelperImage         = "HELPER_IMAGE"
-    DefaultHelperImage     = "rancher/library-busybox:1.32.1"
-    FlagServiceAccountName = "service-account-name"
-    DefaultServiceAccount  = "local-path-provisioner-service-account"
-    EnvServiceAccountName  = "SERVICE_ACCOUNT_NAME"
-    FlagKubeconfig         = "kubeconfig"
-    DefaultConfigFileKey   = "config.json"
-    DefaultConfigMapName   = "local-path-config"
-    FlagConfigMapName      = "configmap-name"
-    FlagHelperPodFile      = "helper-pod-file"
-    DefaultHelperPodFile   = "helperPod.yaml"
+    VERSION                       = "0.0.1"
+    FlagConfigFile                = "config"
+    FlagProvisionerName           = "provisioner-name"
+    EnvProvisionerName            = "PROVISIONER_NAME"
+    DefaultProvisionerName        = "rancher.io/local-path"
+    FlagNamespace                 = "namespace"
+    EnvNamespace                  = "POD_NAMESPACE"
+    DefaultNamespace              = "local-path-storage"
+    FlagHelperImage               = "helper-image"
+    EnvHelperImage                = "HELPER_IMAGE"
+    DefaultHelperImage            = "rancher/library-busybox:1.32.1"
+    FlagServiceAccountName        = "service-account-name"
+    DefaultServiceAccount         = "local-path-provisioner-service-account"
+    EnvServiceAccountName         = "SERVICE_ACCOUNT_NAME"
+    FlagKubeconfig                = "kubeconfig"
+    DefaultConfigFileKey          = "config.json"
+    DefaultConfigMapName          = "local-path-config"
+    FlagConfigMapName             = "configmap-name"
+    FlagHelperPodFile             = "helper-pod-file"
+    DefaultHelperPodFile          = "helperPod.yaml"
+    FlagWorkerThreads             = "worker-threads"
+    DefaultWorkerThreads          = pvController.DefaultThreadiness
+    FlagProvisioningRetryCount    = "provisioning-retry-count"
+    DefaultProvisioningRetryCount = pvController.DefaultFailedProvisionThreshold
+    FlagDeletionRetryCount        = "deletion-retry-count"
+    DefaultDeletionRetryCount     = pvController.DefaultFailedDeleteThreshold
 )
 
 func cmdNotFound(c *cli.Context, command string) {
@@ -107,6 +113,21 @@
                 Usage: "Paths to the Helper pod yaml file",
                 Value: "",
             },
+            cli.IntFlag{
+                Name:  FlagWorkerThreads,
+                Usage: "Number of provisioner worker threads.",
+                Value: DefaultWorkerThreads,
+            },
+            cli.IntFlag{
+                Name:  FlagProvisioningRetryCount,
+                Usage: "Number of retries of failed volume provisioning. 0 means retry indefinitely.",
+                Value: DefaultProvisioningRetryCount,
+            },
+            cli.IntFlag{
+                Name:  FlagDeletionRetryCount,
+                Usage: "Number of retries of failed volume deletion. 0 means retry indefinitely.",
+                Value: DefaultDeletionRetryCount,
+            },
         },
         Action: func(c *cli.Context) {
             if err := startDaemon(c); err != nil {
@@ -222,6 +243,21 @@ func startDaemon(c *cli.Context) error {
         }
     }
 
+    provisioningRetryCount := c.Int(FlagProvisioningRetryCount)
+    if provisioningRetryCount < 0 {
+        return fmt.Errorf("invalid negative integer flag %v", FlagProvisioningRetryCount)
+    }
+
+    deletionRetryCount := c.Int(FlagDeletionRetryCount)
+    if deletionRetryCount < 0 {
+        return fmt.Errorf("invalid negative integer flag %v", FlagDeletionRetryCount)
+    }
+
+    workerThreads := c.Int(FlagWorkerThreads)
+    if workerThreads <= 0 {
+        return fmt.Errorf("invalid zero or negative integer flag %v", FlagWorkerThreads)
+    }
+
     provisioner, err := NewProvisioner(stopCh, kubeClient, configFile, namespace, helperImage, configMapName, serviceAccountName, helperPodYaml)
     if err != nil {
         return err
@@ -232,6 +268,9 @@
         provisioner,
         serverVersion.GitVersion,
         pvController.LeaderElection(false),
+        pvController.FailedProvisionThreshold(provisioningRetryCount),
+        pvController.FailedDeleteThreshold(deletionRetryCount),
+        pvController.Threadiness(workerThreads),
     )
     logrus.Debug("Provisioner started")
     pc.Run(stopCh)
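
For context on how the pieces above fit together: the three optional chart values introduced in `deploy/chart/values.yaml` are passed through by `templates/deployment.yaml` as the matching CLI flags that `main.go` now accepts (`--worker-threads`, `--provisioning-retry-count`, `--deletion-retry-count`). A minimal sketch of a Helm override file that sets them follows; the file name, numbers, and install command are illustrative, not part of this change.

```yaml
# example-values.yaml (hypothetical override file, not included in this diff)
image:
  tag: v0.0.21

# Rendered by the deployment template as: --worker-threads 4
workerThreads: 4
# Rendered as: --provisioning-retry-count 15 (0 means retry indefinitely)
provisioningRetryCount: 15
# Rendered as: --deletion-retry-count 15 (0 means retry indefinitely)
deletionRetryCount: 15
```

Something like `helm install local-path-provisioner ./deploy/chart -f example-values.yaml` would apply it. Leaving the three values unset omits the flags entirely, so the provisioner falls back to the library defaults referenced in main.go (`pvController.DefaultThreadiness`, `DefaultFailedProvisionThreshold`, `DefaultFailedDeleteThreshold`).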