Obsrv 1.3.0 Release (#12)
* feat: add connector config and connector stats update functions

* Issue #33 feat: add documentation for Dataset, Datasources, Data In and Query APIs

* feat: added descriptions for default configurations

* feat: added descriptions for default configurations

* feat: modified kafka connector input topic

* feat: obsrv setup instructions

* feat: revisiting open source features

* feat: masterdata processor job config

* Build deploy v2 (#19)

* #0 - Refactor Dockerfile and Github actions workflow
---------

Co-authored-by: Santhosh Vasabhaktula <[email protected]>
Co-authored-by: ManojCKrishna <[email protected]>

* Update DatasetModels.scala

* Release 1.3.0 into Main branch (#34)

* testing new images

* testing new images

* testing new images

* testing new images

* testing new images

* build new image with bug fixes

* update dockerfile

* update dockerfile

* #0 fix: upgrade packages

* #0 feat: add flink dockerfiles

* #0 fix: add individual extraction

* Issue #0 fix: upgrade ubuntu packages for vulnerabilities

* #0 fix: update github actions release condition

---------

Co-authored-by: ManojKrishnaChintaluri <[email protected]>
Co-authored-by: Praveen <[email protected]>
Co-authored-by: Sowmya N Dixit <[email protected]>

* Update DatasetModels.scala

* fix: update flink base image

* fix: update flink base image

---------

Co-authored-by: shiva-rakshith <[email protected]>
Co-authored-by: Aniket Sakinala <[email protected]>
Co-authored-by: GayathriSrividya <[email protected]>
Co-authored-by: Manjunath Davanam <[email protected]>
Co-authored-by: Manoj Krishna <[email protected]>
Co-authored-by: Santhosh Vasabhaktula <[email protected]>
Co-authored-by: ManojCKrishna <[email protected]>
Co-authored-by: ManojKrishnaChintaluri <[email protected]>
Co-authored-by: Praveen <[email protected]>
10 people authored this commit on Nov 17, 2023
1 parent 1e5b677 · commit 454b0e1
Showing 20 changed files with 1,184 additions and 46 deletions.
1 change: 1 addition & 0 deletions .dockerignore
@@ -0,0 +1 @@
.git
85 changes: 79 additions & 6 deletions .github/workflows/build_and_deploy.yaml
@@ -15,38 +15,111 @@ jobs:
id: tag-checker
run: |
(echo -n TRIGGER_ALLOWED= && echo 'print("${{ github.ref_name }}".split("_")[0]
in ${{ vars.CURRENT_RELEASE }})' | python3) >> "$GITHUB_OUTPUT"
not in ${{ vars.CURRENT_RELEASE }})' | python3) >> "$GITHUB_OUTPUT"
docker-build:
needs: check-tag
if: needs.check-tag.outputs.ALLOWED_TAG == 'True'
runs-on: ubuntu-latest
strategy:
matrix:
include:
- image: "extractor"
target: "extractor-image"
- image: "preprocessor"
target: "preprocessor-image"
- image: "denormalizer"
target: "denormalizer-image"
- image: "transformer"
target: "transformer-image"
- image: "druid-router"
target: "router-image"
- image: "merged-pipeline"
target: "merged-image"
- image: "master-data-processor"
target: "master-data-processor-image"
- image: "kafka-connector"
target: "kafka-connector-image"


steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Maven Build
run: |
mvn clean install
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2

- name: Login to docker hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Build docker image and push
- name: Build merged-pipeline image and push
uses: docker/build-push-action@v4
with:
platforms: linux/amd64
target: merged-image
push: true
tags: ${{ secrets.DOCKERHUB_USERNAME }}/obsrv-core:${{ github.ref_name }}
tags: ${{ secrets.DOCKERHUB_USERNAME }}/merged-pipeline:${{ github.ref_name }}

- name: Build master-data-processor image and push
uses: docker/build-push-action@v4
with:
platforms: linux/amd64
target: master-data-processor-image
push: true
tags: ${{ secrets.DOCKERHUB_USERNAME }}/master-data-processor:${{ github.ref_name }}

- name: Build kafka-connector image and push
uses: docker/build-push-action@v4
with:
platforms: linux/amd64
target: kafka-connector-image
push: true
tags: ${{ secrets.DOCKERHUB_USERNAME }}/kafka-connector:${{ github.ref_name }}

- name: Build ${{matrix.image}} image and push
uses: docker/build-push-action@v4
with:
platforms: linux/amd64
target: ${{matrix.target}}
push: true
tags: ${{ secrets.DOCKERHUB_USERNAME }}/${{matrix.image}}:${{ github.ref_name }}

aws-deploy:
needs: [check-tag, docker-build]
if: needs.check-tag.outputs.ALLOWED_TAG == 'True' && vars.CLOUD_PROVIDER == 'aws'
if: needs.check-tag.outputs.ALLOWED_TAG == 'True'
runs-on: ubuntu-latest
environment: aws-dev
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Terragrunt
uses: autero1/[email protected]
with:
terragrunt_version: v0.45.8
- name: Terragrunt installation
run: terragrunt --version

- name: Clone the terraform deployment repo
uses: actions/checkout@v3
with:
repository: ${{ vars.DEPLOY_REPO }}
path: deploy
ref: ${{ vars.DEPLOY_REPO_REF }}

- name: Fetch and update kubeconfig file
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_REGION: ${{ vars.AWS_REGION }}
run: |
aws eks --region ${{ vars.AWS_REGION }} update-kubeconfig --name ${{ vars.KUBERNETES_CLUSTER_NAME }}
- name: Run terraform init and apply
env:
@@ -55,11 +128,11 @@
AWS_REGION: ${{ vars.AWS_REGION }}
AWS_TERRAFORM_BACKEND_BUCKET_NAME: ${{ vars.AWS_TERRAFORM_BACKEND_BUCKET_NAME }}
AWS_TERRAFORM_BACKEND_BUCKET_REGION: ${{ vars.AWS_TERRAFORM_BACKEND_BUCKET_REGION }}
KUBE_CONFIG_PATH: ~/.kube/config
run: |
cd deploy/terraform/aws
terragrunt init
terragrunt apply -auto-approve -replace=module.flink.helm_release.flink \
-var flink_container_registry=${{ secrets.DOCKERHUB_USERNAME }} \
terragrunt apply -auto-approve -var merged_pipeline_enabled=${{ vars.MERGED_PIPELINE || 'true' }} --replace='module.flink.helm_release.flink' \
-var flink_image_tag=${{ github.ref_name }}
azure-deploy:
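The check-tag job at the top of this workflow gates the rest of the pipeline: it splices github.ref_name and vars.CURRENT_RELEASE into a one-line Python expression, appends the printed result to $GITHUB_OUTPUT as TRIGGER_ALLOWED, and the later jobs run only when the check-tag output ALLOWED_TAG equals 'True'. A minimal sketch of the check with the not in comparison shown in the updated line, using hypothetical values for the interpolated tag and variable:

# Sketch of the check-tag gate; ref_name and current_release are hypothetical
# stand-ins for github.ref_name and vars.CURRENT_RELEASE (the workflow interpolates
# CURRENT_RELEASE verbatim, so it is assumed here to be a Python list literal).
ref_name = "1.3.0_RC2"
current_release = ["1.2.0", "1.2.1"]

# Prints True when the tag's prefix (everything before the first "_")
# is absent from CURRENT_RELEASE.
trigger_allowed = ref_name.split("_")[0] not in current_release
print(f"TRIGGER_ALLOWED={trigger_allowed}")   # appended to $GITHUB_OUTPUT
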
53 changes: 40 additions & 13 deletions Dockerfile
@@ -1,14 +1,41 @@
FROM --platform=linux/x86_64 maven:3.6.0-jdk-11-slim AS build
FROM --platform=linux/x86_64 maven:3.9.4-eclipse-temurin-11-focal AS build-core
COPY . /app
RUN mvn -f /app/pom.xml clean package -DskipTests

FROM --platform=linux/x86_64 sanketikahub/flink:1.15.2-scala_2.12-java11
USER flink
COPY --from=build /app/dataset-registry/target/dataset-registry-1.0.0.jar $FLINK_HOME/lib/
COPY --from=build /app/framework/target/framework-1.0.0.jar $FLINK_HOME/lib/
COPY --from=build /app/pipeline/denormalizer/target/denormalizer-1.0.0.jar $FLINK_HOME/lib/
COPY --from=build /app/pipeline/druid-router/target/druid-router-1.0.0.jar $FLINK_HOME/lib/
COPY --from=build /app/pipeline/extractor/target/extractor-1.0.0.jar $FLINK_HOME/lib/
COPY --from=build /app/pipeline/pipeline-merged/target/pipeline-merged-1.0.0.jar $FLINK_HOME/lib/
COPY --from=build /app/pipeline/preprocessor/target/preprocessor-1.0.0.jar $FLINK_HOME/lib/
COPY --from=build /app/pipeline/transformer/target/transformer-1.0.0.jar $FLINK_HOME/lib/
RUN mvn clean install -DskipTests -f /app/framework/pom.xml
RUN mvn clean install -DskipTests -f /app/dataset-registry/pom.xml

FROM --platform=linux/x86_64 maven:3.9.4-eclipse-temurin-11-focal AS build-pipeline
COPY --from=build-core /root/.m2 /root/.m2
COPY . /app
RUN mvn clean package -DskipTests -f /app/pipeline/pom.xml

FROM --platform=linux/x86_64 sunbird/flink:1.15.2-scala_2.12-jdk-11 as extractor-image
USER flink
COPY --from=build-pipeline /app/pipeline/extractor/target/extractor-1.0.0.jar $FLINK_HOME/lib/

FROM --platform=linux/x86_64 sunbird/flink:1.15.2-scala_2.12-jdk-11 as preprocessor-image
USER flink
COPY --from=build-pipeline /app/pipeline/preprocessor/target/preprocessor-1.0.0.jar $FLINK_HOME/lib/

FROM --platform=linux/x86_64 sunbird/flink:1.15.2-scala_2.12-jdk-11 as denormalizer-image
USER flink
COPY --from=build-pipeline /app/pipeline/denormalizer/target/denormalizer-1.0.0.jar $FLINK_HOME/lib/

FROM --platform=linux/x86_64 sunbird/flink:1.15.2-scala_2.12-jdk-11 as transformer-image
USER flink
COPY --from=build-pipeline /app/pipeline/transformer/target/transformer-1.0.0.jar $FLINK_HOME/lib/

FROM --platform=linux/x86_64 sunbird/flink:1.15.2-scala_2.12-jdk-11 as router-image
USER flink
COPY --from=build-pipeline /app/pipeline/druid-router/target/druid-router-1.0.0.jar $FLINK_HOME/lib/

FROM --platform=linux/x86_64 sunbird/flink:1.15.2-scala_2.12-jdk-11 as merged-image
USER flink
COPY --from=build-pipeline /app/pipeline/pipeline-merged/target/pipeline-merged-1.0.0.jar $FLINK_HOME/lib/

FROM --platform=linux/x86_64 sunbird/flink:1.15.2-scala_2.12-jdk-11 as master-data-processor-image
USER flink
COPY --from=build-pipeline /app/pipeline/master-data-processor/target/master-data-processor-1.0.0.jar $FLINK_HOME/lib

FROM --platform=linux/x86_64 sunbird/flink:1.15.2-scala_2.12-jdk-11 as kafka-connector-image
USER flink
COPY --from=build-pipeline /app/pipeline/kafka-connector/target/kafka-connector-1.0.0.jar $FLINK_HOME/lib
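The refactored Dockerfile builds framework and dataset-registry once in a shared Maven stage, then packages each Flink job in its own stage, and the workflow matrix above selects a stage per image through the target input. A rough local equivalent of that fan-out, sketched with a hypothetical Docker Hub user, tag, and build context (not part of this commit), loops over the same image-to-target pairs; each resulting image ships only its own job jar in $FLINK_HOME/lib rather than the full set that the previous single obsrv-core image carried.

# Sketch: build every per-job image from the multi-stage Dockerfile above,
# mirroring the image -> target pairs declared in the workflow matrix.
# The registry user, tag, and build context are hypothetical.
import subprocess

IMAGES = {
    "extractor": "extractor-image",
    "preprocessor": "preprocessor-image",
    "denormalizer": "denormalizer-image",
    "transformer": "transformer-image",
    "druid-router": "router-image",
    "merged-pipeline": "merged-image",
    "master-data-processor": "master-data-processor-image",
    "kafka-connector": "kafka-connector-image",
}

def build_all(user: str = "exampleuser", tag: str = "1.3.0") -> None:
    for image, target in IMAGES.items():
        # One docker build per matrix entry, equivalent to the
        # docker/build-push-action step (minus the push).
        subprocess.run(
            ["docker", "build", "--target", target, "-t", f"{user}/{image}:{tag}", "."],
            check=True,
        )

if __name__ == "__main__":
    build_all()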
