diff --git a/ansible/artifacts-download.yml b/ansible/artifacts-download.yml
index fcf58787f5..6b8629cc4e 100644
--- a/ansible/artifacts-download.yml
+++ b/ansible/artifacts-download.yml
@@ -40,3 +40,17 @@
         aws_access_key_id: "{{ cloud_artifact_storage_accountname }}"
         aws_secret_access_key: "{{ cloud_artifact_storage_secret }}"
       when: cloud_service_provider == "aws"
+
+
+    - name: download artifact from oci oss
+      include_role:
+        name: oci-cloud-storage
+        apply:
+          environment:
+            OCI_CLI_AUTH: "instance_principal"
+        tasks_from: download.yml
+      vars:
+        local_file_or_folder_path: "{{ artifact_path }}"
+        oss_bucket_name: "{{ cloud_storage_artifacts_bucketname }}"
+        oss_object_name: "{{ artifact }}"
+      when: cloud_service_provider == "oci"
diff --git a/ansible/artifacts-upload.yml b/ansible/artifacts-upload.yml
index 305492afc2..c11ec36c3c 100644
--- a/ansible/artifacts-upload.yml
+++ b/ansible/artifacts-upload.yml
@@ -41,3 +41,16 @@
         aws_access_key_id: "{{ cloud_artifact_storage_accountname }}"
         aws_secret_access_key: "{{ cloud_artifact_storage_secret }}"
       when: cloud_service_provider == "aws"
+
+    - name: upload artifact to oci oss
+      include_role:
+        name: oci-cloud-storage
+        apply:
+          environment:
+            OCI_CLI_AUTH: "instance_principal"
+        tasks_from: upload.yml
+      vars:
+        local_file_or_folder_path: "{{ artifact_path }}"
+        oss_bucket_name: "{{ cloud_storage_artifacts_bucketname }}"
+        oss_path: "{{ artifact }}"
+      when: cloud_service_provider == "oci"
\ No newline at end of file
diff --git a/ansible/roles/analytics-bootstrap-always/meta/main.yml b/ansible/roles/analytics-bootstrap-always/meta/main.yml
index af15826aef..3566881eff 100644
--- a/ansible/roles/analytics-bootstrap-always/meta/main.yml
+++ b/ansible/roles/analytics-bootstrap-always/meta/main.yml
@@ -1,6 +1,7 @@
 ---
 dependencies:
   - { role: jdk11 , become: yes }
-  - { role: azure-cli , become: yes }
+  - { role: azure-cli , become: yes, when: cloud_service_provider == "azure" }
+  - { role: oci-cli , become: yes, when: cloud_service_provider == "oci" }
diff --git a/ansible/roles/analytics-bootstrap-spark/tasks/main.yml b/ansible/roles/analytics-bootstrap-spark/tasks/main.yml
index 13ba75f78a..663f76d68a 100644
--- a/ansible/roles/analytics-bootstrap-spark/tasks/main.yml
+++ b/ansible/roles/analytics-bootstrap-spark/tasks/main.yml
@@ -10,6 +10,36 @@
   with_items:
     - {var: 'azure_storage_key', value: '{{ sunbird_private_storage_account_name }}'}
     - {var: 'azure_storage_secret', value: '{{ sunbird_private_storage_account_key }}'}
+
+
+- name: Add oci cli PATH to the bashrc file of the spark user
+  become: yes
+  become_user: "{{ analytics_user }}"
+  lineinfile:
+    path: '{{ analytics_user_home }}/.bashrc'
+    line: 'export PATH={{ analytics_user_home }}/bin:$PATH'
+    regexp: "export PATH={{ analytics_user_home }}/bin.*"
+  when: cloud_service_provider == "oci"
+
+- name: Create OCI cli config directory
+  become: yes
+  become_user: "{{ analytics_user }}"
+  file:
+    path: "{{ analytics_user_home }}/.oci"
+    state: directory
+  when: cloud_service_provider == "oci"
+
+- name: Create OCI cli key file
+  become: yes
+  become_user: "{{ analytics_user }}"
+  template: src=oci-key.j2 dest={{ analytics_user_home }}/.oci/oci-key.pem mode=600 owner={{ analytics_user }} group={{ analytics_group }}
+  when: cloud_service_provider == "oci"
+
+- name: Create OCI cli config file
+  become: yes
+  become_user: "{{ analytics_user }}"
+  template: src=oci-cli-config.j2 dest={{ analytics_user_home }}/.oci/config mode=600 owner={{ analytics_user }} group={{ analytics_group }}
+  when: cloud_service_provider == "oci"
 
 - name: Adding ENV Vars to spark servers environment.
   become: yes
diff --git a/ansible/roles/analytics-bootstrap-spark/templates/oci-cli-config.j2 b/ansible/roles/analytics-bootstrap-spark/templates/oci-cli-config.j2
new file mode 100644
index 0000000000..56cf3ba3ef
--- /dev/null
+++ b/ansible/roles/analytics-bootstrap-spark/templates/oci-cli-config.j2
@@ -0,0 +1,6 @@
+[DEFAULT]
+user={{ oci_cli_user_ocid }}
+fingerprint={{ oci_cli_fingerprint }}
+key_file=/home/analytics/.oci/oci-key.pem
+tenancy={{ oci_cli_tenancy }}
+region={{ oci_cli_region }}
\ No newline at end of file
diff --git a/ansible/roles/analytics-bootstrap-spark/templates/oci-key.j2 b/ansible/roles/analytics-bootstrap-spark/templates/oci-key.j2
new file mode 100644
index 0000000000..b969594016
--- /dev/null
+++ b/ansible/roles/analytics-bootstrap-spark/templates/oci-key.j2
@@ -0,0 +1 @@
+{{ oci_cli_key_content }}
\ No newline at end of file
diff --git a/ansible/roles/analytics-spark-provision/tasks/main.yml b/ansible/roles/analytics-spark-provision/tasks/main.yml
index 25ebd9da23..65731cecb9 100644
--- a/ansible/roles/analytics-spark-provision/tasks/main.yml
+++ b/ansible/roles/analytics-spark-provision/tasks/main.yml
@@ -117,28 +117,31 @@
     recurse: yes
   become: yes
 
+# Changed to install Ruby 2.6 as per R.4.6.0
 - name: Install latest ruby
   become: yes
   become_user: "{{ analytics_user }}"
-  shell: "export PATH=$PATH:/home/analytics/.rvm/bin && rvm install ruby-2.5"
+  shell: "export PATH=$PATH:/home/analytics/.rvm/bin && rvm install ruby-2.6"
 
 - name: Add ruby repository
   become: yes
   apt_repository:
     repo: ppa:brightbox/ruby-ng
 
+# Changed to install ruby2.6-dev as per R.4.6.0 - there is no ruby2.2-dev in bionic
 - name: Install latest ruby-dev
   become: yes
   apt:
-    name: "ruby2.5-dev"
+    name: "ruby2.6-dev"
     state: installed
     update_cache: true
     cache_valid_time: 3600
 
+# Changed to ruby 2.6 as per R.4.6.0
 - name: Install ruby-kafka
   become: yes
   become_user: "{{ analytics_user }}"
-  shell: "bash -ilc 'export PATH=$PATH:/home/analytics/.rvm/bin && rvm --default use ruby-2.5 && gem install ruby-kafka'"
+  shell: "bash -ilc 'export PATH=$PATH:/home/analytics/.rvm/bin && rvm --default use ruby-2.6 && gem install --user-install --no-document ruby-kafka'"
 
 - name: Download Kafka-2.11
   become: yes
diff --git a/ansible/roles/data-products-deploy/tasks/main.yml b/ansible/roles/data-products-deploy/tasks/main.yml
index df495a5d4a..f4cbf7c216 100644
--- a/ansible/roles/data-products-deploy/tasks/main.yml
+++ b/ansible/roles/data-products-deploy/tasks/main.yml
@@ -5,6 +5,19 @@
   tags:
     - always
 
+- name: Ensure oci oss bucket exists
+  command: "/home/{{analytics_user}}/bin/oci os bucket get --name {{ bucket }}"
+  register: check_bucket
+  when: dp_object_store_type == "s3" and cloud_service_provider == "oci"
+  tags:
+    - always
+
+- name: Create oci oss bucket
+  command: "/home/{{analytics_user}}/bin/oci os bucket create -c {{oci_bucket_compartment}} --name {{bucket}}"
+  when: dp_object_store_type == "s3" and cloud_service_provider == "oci" and check_bucket.rc != 0
+  tags:
+    - always
+
 - name: Copy Core Data Products
   copy: src={{ analytics_batch_module_artifact }} dest={{ analytics.home }}/models-{{ model_version }}
   tags:
@@ -14,6 +27,15 @@
   command: az storage blob upload --overwrite -c {{ bucket }} --name models-{{ model_version }}/{{ analytics_batch_module_artifact }} -f {{ analytics.home }}/models-{{ model_version }}/{{ analytics_batch_module_artifact }}
   async: 3600
   poll: 10
+  when: dp_object_store_type == "azure"
+  tags:
+    - dataproducts-spark-cluster
+
+- name: Copy Core Data Products to oci oss
+  command: /home/{{analytics_user}}/bin/oci os object put -bn {{ bucket }} --name models-{{ model_version }}/{{ analytics_batch_module_artifact }} --file {{ analytics.home }}/models-{{ model_version }}/{{ analytics_batch_module_artifact }} --force
+  async: 3600
+  poll: 10
+  when: dp_object_store_type == "s3" and cloud_service_provider == "oci"
   tags:
     - dataproducts-spark-cluster
 
@@ -27,8 +49,17 @@
   command: az storage blob upload --overwrite -c {{ bucket }} --name models-{{ model_version }}/data-products-1.0.jar -f {{ analytics.home }}/models-{{ model_version }}/data-products-1.0/data-products-1.0.jar
   async: 3600
   poll: 10
+  when: dp_object_store_type == "azure"
+  tags:
+    - ed-dataproducts-spark-cluster
+
+- name: Copy Ed Data Products to oci oss
+  command: /home/{{analytics_user}}/bin/oci os object put -bn {{ bucket }} --name models-{{ model_version }}/data-products-1.0.jar --file {{ analytics.home }}/models-{{ model_version }}/data-products-1.0/data-products-1.0.jar --force
+  async: 3600
+  poll: 10
+  when: dp_object_store_type == "s3" and cloud_service_provider == "oci"
   tags:
-     - ed-dataproducts-spark-cluster
+    - ed-dataproducts-spark-cluster
 
 - name: Copy Framework Library
   copy: src={{ analytics_core_artifact }} dest={{ analytics.home }}/models-{{ model_version }}
@@ -39,6 +70,15 @@
   command: az storage blob upload --overwrite --debug -c {{ bucket }} --name models-{{ model_version }}/{{ analytics_core_artifact }} -f {{ analytics.home }}/models-{{ model_version }}/{{ analytics_core_artifact }}
   async: 3600
   poll: 10
+  when: dp_object_store_type == "azure"
+  tags:
+    - framework-spark-cluster
+
+- name: Copy Framework Library to oci oss
+  command: /home/{{analytics_user}}/bin/oci os object put -bn {{ bucket }} --name models-{{ model_version }}/{{ analytics_core_artifact }} --file {{ analytics.home }}/models-{{ model_version }}/{{ analytics_core_artifact }} --force
+  async: 3600
+  poll: 10
+  when: dp_object_store_type == "s3" and cloud_service_provider == "oci"
   tags:
     - framework-spark-cluster
 
@@ -51,6 +91,15 @@
   command: az storage blob upload --overwrite -c {{ bucket }} --name models-{{ model_version }}/{{ scruid_artifact }} -f {{ analytics.home }}/models-{{ model_version }}/{{ scruid_artifact }}
   async: 3600
   poll: 10
+  when: dp_object_store_type == "azure"
+  tags:
+    - framework-spark-cluster
+
+- name: Copy Scruid Library to oci oss
+  command: /home/{{analytics_user}}/bin/oci os object put -bn {{ bucket }} --name models-{{ model_version }}/{{ scruid_artifact }} --file {{ analytics.home }}/models-{{ model_version }}/{{ scruid_artifact }} --force
+  async: 3600
+  poll: 10
+  when: dp_object_store_type == "s3" and cloud_service_provider == "oci"
   tags:
     - framework-spark-cluster
 
@@ -100,9 +149,18 @@
   command: az storage blob upload --overwrite -c {{ bucket }} -f {{ analytics.home }}/models-{{ model_version }}/application.conf --name models-{{ model_version }}/application.conf
   async: 3600
   poll: 10
+  when: dp_object_store_type == "azure"
   tags:
     - framework-spark-cluster
 
+- name: Copy configuration file to oci oss
+  command: /home/{{analytics_user}}/bin/oci os object put -bn {{ bucket }} --file {{ analytics.home }}/models-{{ model_version }}/application.conf --name models-{{ model_version }}/application.conf --force
+  async: 3600
+  poll: 10
+  when: dp_object_store_type == "s3" and cloud_service_provider == "oci"
+  tags:
+    - framework-spark-cluster
+
 - name: Copy log4j2 xml file
   template: src=log4j2.xml.j2 dest={{ analytics.home }}/models-{{ model_version }}/log4j2.xml mode=755 owner={{ analytics_user }} group={{ analytics_group }}
   tags: [ dataproducts, framework, ed-dataproducts ]
diff --git a/ansible/roles/oci-cli/defaults/main.yml b/ansible/roles/oci-cli/defaults/main.yml
new file mode 100644
index 0000000000..147a2e03f1
--- /dev/null
+++ b/ansible/roles/oci-cli/defaults/main.yml
@@ -0,0 +1 @@
+oci_cli_url: https://github.com/oracle/oci-cli/releases/download/v3.22.0/oci-cli-3.22.0-Ubuntu-18.04-Offline.zip
\ No newline at end of file
diff --git a/ansible/roles/oci-cli/tasks/main.yml b/ansible/roles/oci-cli/tasks/main.yml
new file mode 100644
index 0000000000..389a9e8235
--- /dev/null
+++ b/ansible/roles/oci-cli/tasks/main.yml
@@ -0,0 +1,24 @@
+---
+- name: Download the installation file
+  get_url:
+    url: "{{ oci_cli_url }}"
+    dest: /tmp/ocicli.zip
+
+- name: Install zip and unzip
+  apt:
+    name: "{{item}}"
+    state: latest
+  with_items:
+    - zip
+    - unzip
+
+- name: Unzip the installer
+  unarchive:
+    src: /tmp/ocicli.zip
+    dest: /tmp/
+    remote_src: yes
+
+- name: Install oci cli
+  shell: ./oci-cli-installation/install.sh --install-dir {{ analytics_user_home }} --exec-dir {{ analytics_user_home }} --script-dir {{ analytics_user_home }} --accept-all-defaults
+  args:
+    chdir: /tmp/
diff --git a/ansible/roles/oci-cloud-storage/defaults/main.yml b/ansible/roles/oci-cloud-storage/defaults/main.yml
new file mode 100644
index 0000000000..72727de167
--- /dev/null
+++ b/ansible/roles/oci-cloud-storage/defaults/main.yml
@@ -0,0 +1,3 @@
+oss_bucket_name: ""
+oss_path: ""
+local_file_or_folder_path: ""
diff --git a/ansible/roles/oci-cloud-storage/tasks/delete-folder.yml b/ansible/roles/oci-cloud-storage/tasks/delete-folder.yml
new file mode 100644
index 0000000000..6ed4e6b8b4
--- /dev/null
+++ b/ansible/roles/oci-cloud-storage/tasks/delete-folder.yml
@@ -0,0 +1,5 @@
+---
+- name: delete files and folders recursively
+  shell: "oci os object bulk-delete -ns {{oss_namespace}} -bn {{oss_bucket_name}} --prefix {{oss_path}} --force"
+  async: 3600
+  poll: 10
diff --git a/ansible/roles/oci-cloud-storage/tasks/delete.yml b/ansible/roles/oci-cloud-storage/tasks/delete.yml
new file mode 100644
index 0000000000..65d18843ca
--- /dev/null
+++ b/ansible/roles/oci-cloud-storage/tasks/delete.yml
@@ -0,0 +1,7 @@
+- name: Ensure oci oss bucket exists
+  command: oci os bucket get --name {{ oss_bucket_name }}
+
+- name: Delete object from oci oss bucket
+  command: oci os object delete -bn {{ oss_bucket_name }} --name {{ oss_path }} --force
+  async: 3600
+  poll: 10
\ No newline at end of file
diff --git a/ansible/roles/oci-cloud-storage/tasks/download.yml b/ansible/roles/oci-cloud-storage/tasks/download.yml
new file mode 100644
index 0000000000..bb32e9ed93
--- /dev/null
+++ b/ansible/roles/oci-cloud-storage/tasks/download.yml
@@ -0,0 +1,7 @@
+- name: Ensure oci oss bucket exists
+  command: oci os bucket get --name {{ oss_bucket_name }}
+
+- name: download files from oci oss bucket
+  command: oci os object get -bn {{ oss_bucket_name }} --name {{ oss_object_name }} --file {{ local_file_or_folder_path }}
+  async: 3600
+  poll: 10
diff --git a/ansible/roles/oci-cloud-storage/tasks/main.yml b/ansible/roles/oci-cloud-storage/tasks/main.yml
new file mode 100644
index 0000000000..6f9dca6b63
--- /dev/null
+++ b/ansible/roles/oci-cloud-storage/tasks/main.yml
@@ -0,0 +1,18 @@
+---
+- name: delete files from oci oss bucket
+  include: delete.yml
+
+- name: delete folders from oci oss bucket recursively
+  include: delete-folder.yml
+
+
+- name: download file from oci oss
+  include: download.yml
+
+- name: upload files from a local path to oci oss
+  include: upload.yml
+
+- name: upload files and folders from a local directory to oci oss
+  include: upload-folder.yml
+
+
diff --git a/ansible/roles/oci-cloud-storage/tasks/upload-folder.yml b/ansible/roles/oci-cloud-storage/tasks/upload-folder.yml
new file mode 100644
index 0000000000..6e4d06562c
--- /dev/null
+++ b/ansible/roles/oci-cloud-storage/tasks/upload-folder.yml
@@ -0,0 +1,8 @@
+---
+- name: Ensure oci oss bucket exists
+  command: oci os bucket get --name {{ oss_bucket_name }}
+
+- name: Upload folder to oci oss bucket
+  command: oci os object bulk-upload -bn {{ oss_bucket_name }} --prefix {{ oss_path }} --src-dir {{ local_file_or_folder_path }} --content-type auto
+  async: 3600
+  poll: 10
diff --git a/ansible/roles/oci-cloud-storage/tasks/upload.yml b/ansible/roles/oci-cloud-storage/tasks/upload.yml
new file mode 100644
index 0000000000..2771da5771
--- /dev/null
+++ b/ansible/roles/oci-cloud-storage/tasks/upload.yml
@@ -0,0 +1,8 @@
+---
+- name: Ensure oci oss bucket exists
+  command: oci os bucket get --name {{ oss_bucket_name }}
+
+- name: Upload to oci oss bucket
+  command: oci os object put -bn {{ oss_bucket_name }} --name {{ oss_path }} --file {{ local_file_or_folder_path }} --content-type auto --force
+  async: 3600
+  poll: 10
diff --git a/ansible/roles/portal-dashboard/tasks/main.yml b/ansible/roles/portal-dashboard/tasks/main.yml
index adfbd76b39..7c53a72307 100644
--- a/ansible/roles/portal-dashboard/tasks/main.yml
+++ b/ansible/roles/portal-dashboard/tasks/main.yml
@@ -12,7 +12,7 @@
   pip:
     name: "{{library_path}}"
    virtualenv: "{{ virtualenv_path }}"
-    virtualenv_python: "python3.6"
+    # virtualenv_python: "python3.6"
   tags:
     - common
diff --git a/kubernetes/helm_charts/bootstrap/reloader/templates/clusterrole.yaml b/kubernetes/helm_charts/bootstrap/reloader/templates/clusterrole.yaml
index 8d51ef406b..b2817c5f9a 100755
--- a/kubernetes/helm_charts/bootstrap/reloader/templates/clusterrole.yaml
+++ b/kubernetes/helm_charts/bootstrap/reloader/templates/clusterrole.yaml
@@ -1,5 +1,9 @@
 {{- if and .Values.reloader.watchGlobally (.Values.reloader.rbac.enabled) }}
+{{- if (.Capabilities.APIVersions.Has "rbac.authorization.k8s.io/v1") }}
+apiVersion: rbac.authorization.k8s.io/v1
+{{ else }}
 apiVersion: rbac.authorization.k8s.io/v1beta1
+{{- end }}
 kind: ClusterRole
 metadata:
   labels:
diff --git a/kubernetes/helm_charts/bootstrap/reloader/templates/clusterrolebinding.yaml b/kubernetes/helm_charts/bootstrap/reloader/templates/clusterrolebinding.yaml
index 28c9d4b916..748e52528d 100755
--- a/kubernetes/helm_charts/bootstrap/reloader/templates/clusterrolebinding.yaml
+++ b/kubernetes/helm_charts/bootstrap/reloader/templates/clusterrolebinding.yaml
@@ -1,5 +1,9 @@
 {{- if and .Values.reloader.watchGlobally (.Values.reloader.rbac.enabled) }}
+{{- if (.Capabilities.APIVersions.Has "rbac.authorization.k8s.io/v1") }}
+apiVersion: rbac.authorization.k8s.io/v1
+{{ else }}
 apiVersion: rbac.authorization.k8s.io/v1beta1
+{{- end }}
 kind: ClusterRoleBinding
 metadata:
   labels:
diff --git a/kubernetes/helm_charts/bootstrap/reloader/templates/role.yaml b/kubernetes/helm_charts/bootstrap/reloader/templates/role.yaml
index 5827f5cdcb..b654024031 100755
--- a/kubernetes/helm_charts/bootstrap/reloader/templates/role.yaml
+++ b/kubernetes/helm_charts/bootstrap/reloader/templates/role.yaml
@@ -1,5 +1,9 @@
 {{- if and (not (.Values.reloader.watchGlobally)) (.Values.reloader.rbac.enabled) }}
+{{- if (.Capabilities.APIVersions.Has "rbac.authorization.k8s.io/v1") }}
+apiVersion: rbac.authorization.k8s.io/v1
+{{ else }}
 apiVersion: rbac.authorization.k8s.io/v1beta1
+{{- end }}
 kind: Role
 metadata:
   labels:
diff --git a/kubernetes/helm_charts/bootstrap/reloader/templates/rolebinding.yaml b/kubernetes/helm_charts/bootstrap/reloader/templates/rolebinding.yaml
index 94fb1f838b..d915db304d 100755
--- a/kubernetes/helm_charts/bootstrap/reloader/templates/rolebinding.yaml
+++ b/kubernetes/helm_charts/bootstrap/reloader/templates/rolebinding.yaml
@@ -1,5 +1,9 @@
 {{- if and (not (.Values.reloader.watchGlobally)) (.Values.reloader.rbac.enabled) }}
+{{- if (.Capabilities.APIVersions.Has "rbac.authorization.k8s.io/v1") }}
+apiVersion: rbac.authorization.k8s.io/v1
+{{ else }}
 apiVersion: rbac.authorization.k8s.io/v1beta1
+{{- end }}
 kind: RoleBinding
 metadata:
   labels:
diff --git a/kubernetes/helm_charts/datapipeline_jobs/values.j2 b/kubernetes/helm_charts/datapipeline_jobs/values.j2
index 22630c3015..cbafb57c22 100644
--- a/kubernetes/helm_charts/datapipeline_jobs/values.j2
+++ b/kubernetes/helm_charts/datapipeline_jobs/values.j2
@@ -8,7 +8,12 @@ azure_account: {{ azure_account }}
 azure_secret: {{ azure_secret }}
 s3_access_key: {{ s3_storage_key }}
 s3_secret_key: {{ s3_storage_secret }}
+{% if cloud_service_provider == "oci" %}
+s3_endpoint: {{ oci_flink_s3_storage_endpoint }}
+{% else %}
 s3_endpoint: {{ s3_storage_endpoint }}
+{% endif %}
+
 s3_path_style_access: {{ s3_path_style_access }}
 
 serviceMonitor:
@@ -158,7 +163,8 @@ base_config: |
 {% if checkpoint_store_type == "azure" %}
       base.url = "wasbs://"${job.statebackend.blob.storage.container}"@"${job.statebackend.blob.storage.account}"/"${job.statebackend.blob.storage.checkpointing.dir}
 {% elif checkpoint_store_type == "s3" %}
-      base.url = "s3://"${job.statebackend.blob.storage.account}"/"${job.statebackend.blob.storage.container}"/"${job.statebackend.blob.storage.checkpointing.dir}
+      # base.url = "s3://"${job.statebackend.blob.storage.account}"/"${job.statebackend.blob.storage.container}"/"${job.statebackend.blob.storage.checkpointing.dir}
+      base.url = "s3://"${job.statebackend.blob.storage.container}"/"${job.statebackend.blob.storage.checkpointing.dir}
 {% endif %}
     }
   }
@@ -236,7 +242,6 @@ ingest-router:
     taskmanager.memory.process.size: {{ flink_job_names['ingest-router'].taskmanager_process_memory }}
     jobmanager.memory.process.size: {{ flink_job_names['ingest-router'].jobmanager_process_memory }}
 
-
 telemetry-extractor:
   telemetry-extractor: |+
     include file("/data/flink/conf/base-config.conf")
@@ -287,6 +292,7 @@
     taskmanager.memory.process.size: {{ flink_job_names['telemetry-extractor'].taskmanager_process_memory }}
     jobmanager.memory.process.size: {{ flink_job_names['telemetry-extractor'].jobmanager_process_memory }}
 
+
 pipeline-preprocessor:
   pipeline-preprocessor: |+
     include file("/data/flink/conf/base-config.conf")
diff --git a/kubernetes/helm_charts/secor/config/secor.common.properties b/kubernetes/helm_charts/secor/config/secor.common.properties
index 7050ebcf1b..ebe2bb7d26 100644
--- a/kubernetes/helm_charts/secor/config/secor.common.properties
+++ b/kubernetes/helm_charts/secor/config/secor.common.properties
@@ -23,12 +23,12 @@ secor.kafka.topic_blacklist=
 
 # Choose what to fill according to the service you are using
 # in the choice option you can fill S3, GS, Swift or Azure
-cloud.service=Azure
+cloud.service={{ $.Values.storage_type }}
 
 # AWS authentication credentials.
 # Leave empty if using IAM role-based authentication with s3a filesystem.
-aws.access.key=
-aws.secret.key=
+aws.access.key={{ $.Values.s3_access_key }}
+aws.secret.key={{ $.Values.s3_secret_id }}
 aws.role=
 
 # Optional Proxy Setting. Set to true to enable proxy
@@ -51,12 +51,12 @@ aws.proxy.http.port=
 # secor.upload.manager.class.
 #
 # http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
-aws.region=
-aws.endpoint=
+aws.region={{ $.Values.s3_region }}
+aws.endpoint={{ $.Values.s3_endpoint }}
 
 # Toggle the AWS S3 client between virtual host style access and path style
 # access. See http://docs.aws.amazon.com/AmazonS3/latest/dev/VirtualHosting.html
-aws.client.pathstyleaccess=false
+aws.client.pathstyleaccess={{ $.Values.s3_path_style_access }}
 
 ###########################
 # START AWS S3 ENCRYPTION #
@@ -357,7 +357,8 @@ secor.max.message.size.bytes=100000
 
 # Class that will manage uploads. Default is to use the hadoop
 # interface to S3.
-secor.upload.manager.class=com.pinterest.secor.uploader.AzureUploadManager
+# secor.upload.manager.class=com.pinterest.secor.uploader.AzureUploadManager
+secor.upload.manager.class=com.pinterest.secor.uploader.S3UploadManager
 
 #Set below property to your timezone, and the events will be parsed and converted to the timezone specified
 secor.message.timezone=UTC
diff --git a/kubernetes/helm_charts/secor/config/secor.partition.properties b/kubernetes/helm_charts/secor/config/secor.partition.properties
index 743e1bab86..cbcc742081 100644
--- a/kubernetes/helm_charts/secor/config/secor.partition.properties
+++ b/kubernetes/helm_charts/secor/config/secor.partition.properties
@@ -14,7 +14,9 @@
 # limitations under the License.
 
 include=secor.properties
+{{- if eq .Values.storage_type "Azure" }}
 include=secor.azure.properties
+{{- end }}
 
 # Name of the Kafka consumer group.
 secor.kafka.group={{ get (get $.Values.secor_jobs $.Release.Name) "consumer_group" }}
@@ -23,7 +25,7 @@ secor.kafka.group={{ get (get $.Values.secor_jobs $.Release.Name) "consumer_grou
 secor.message.parser.class={{ get (get $.Values.secor_jobs $.Release.Name) "message_parser" }}
 
 # S3 path where sequence files are stored.
-secor.s3.path=
+secor.s3.path={{- get (get $.Values.secor_jobs $.Release.Name) "base_path" }}
 
 # Swift path where sequence files are stored.
 secor.swift.path=secor_dev/partition
diff --git a/kubernetes/helm_charts/secor/config/secor.properties b/kubernetes/helm_charts/secor/config/secor.properties
index 6f2876d1de..4a724a051a 100644
--- a/kubernetes/helm_charts/secor/config/secor.properties
+++ b/kubernetes/helm_charts/secor/config/secor.properties
@@ -10,7 +10,7 @@ include=secor.common.properties
 
 ###############
 # Name of the s3 bucket where log files are stored.
-secor.s3.bucket=
+secor.s3.bucket={{ $.Values.s3_bucket_name }}
 
 ###############
 # Using Swift #
diff --git a/kubernetes/helm_charts/secor/values.j2 b/kubernetes/helm_charts/secor/values.j2
index 4aa2e0ee83..0519937267 100644
--- a/kubernetes/helm_charts/secor/values.j2
+++ b/kubernetes/helm_charts/secor/values.j2
@@ -1,9 +1,25 @@
 azure_account: "{{ sunbird_private_storage_account_name }}"
 azure_secret: "{{ sunbird_private_storage_account_key }}"
-azure_container_name: "telemetry-data-store"
+azure_container_name: "{{ cloud_storage_telemetry_bucketname }}"
 
-namespace: {{ secor_namespace }}
+s3_access_key: "{{ s3_storage_key }}"
+s3_secret_id: "{{ s3_storage_secret }}"
+s3_region: "{{ oci_region }}"
+s3_endpoint: "{{ s3_storage_endpoint }}"
+s3_path_style_access: "{{ s3_path_style_access }}"
+s3_bucket_name: "{{ cloud_storage_telemetry_bucketname }}"
+
+{% if cloud_service_provider == 'oci' -%}
+storage_type: "S3"
+storageClass: "oci-bv"
+{%- else -%}
+storage_type: "Azure"
 storageClass: {{ secor_storage_class | default('default') }}
+{%- endif %}
+
+
+
+namespace: {{ secor_namespace }}
 
 imagepullsecrets: {{ imagepullsecrets }}
 
 secor_jobs: