diff --git a/.github/workflows/manual.yml b/.github/workflows/manual.yml
deleted file mode 100644
index 036e16aea..000000000
--- a/.github/workflows/manual.yml
+++ /dev/null
@@ -1,46 +0,0 @@
-# Workflow to ensure whenever a Github PR is submitted,
-# a JIRA ticket gets created automatically.
-name: Manual Workflow
-
-# Controls when the action will run.
-on:
-  # Triggers the workflow on pull request events but only for the master branch
-  pull_request_target:
-    types: [opened, reopened]
-
-  # Allows you to run this workflow manually from the Actions tab
-  workflow_dispatch:
-
-jobs:
-  test-transition-issue:
-    name: Convert Github Issue to Jira Issue
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@master
-
-      - name: Login
-        uses: atlassian/gajira-login@master
-        env:
-          JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
-          JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
-          JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
-
-      - name: Create NEW JIRA ticket
-        id: create
-        uses: atlassian/gajira-create@master
-        with:
-          project: CONUPDATE
-          issuetype: Task
-          summary: |
-            Github PR | Repo: ${{ github.repository }} | PR# ${{github.event.number}}
-          description: |
-            Repo link: https://github.com/${{ github.repository }}
-            PR no. ${{ github.event.pull_request.number }}
-            PR title: ${{ github.event.pull_request.title }}
-            PR description: ${{ github.event.pull_request.description }}
-            In addition, please resolve other issues, if any.
-          fields: '{"components": [{"name":"nd0821 - ML DevOpsEngineer ND"}], "customfield_16449":"https://classroom.udacity.com/", "customfield_16450":"Resolve the PR", "labels": ["github"], "priority":{"id": "4"}}'
-
-      - name: Log created issue
-        run: echo "Issue ${{ steps.create.outputs.issue }} was created"
diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml
new file mode 100644
index 000000000..9ee9f65ff
--- /dev/null
+++ b/.github/workflows/python-app.yml
@@ -0,0 +1,39 @@
+# This workflow will install Python dependencies, run tests and lint with a single version of Python
+# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
+
+name: test_app
+
+on:
+  push:
+    branches: [ "master" ]
+  pull_request:
+    branches: [ "master" ]
+
+permissions:
+  contents: read
+
+jobs:
+  build:
+
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v3
+    - name: Set up Python 3.8
+      uses: actions/setup-python@v3
+      with:
+        python-version: "3.8"
+    - name: Install dependencies
+      run: |
+        python -m pip install --upgrade pip
+        pip install flake8 pytest
+        if [ -f starter/requirements.txt ]; then pip install -r starter/requirements.txt; fi
+    - name: Lint with flake8
+      run: |
+        # stop the build if there are Python syntax errors or undefined names
+        # flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
+        # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
+        flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
+    # - name: Test with pytest
+    #   run: |
+    #     pytest
diff --git a/starter/requirements.txt b/starter/requirements.txt
index fc7fe0309..98b75cec6 100644
--- a/starter/requirements.txt
+++ b/starter/requirements.txt
@@ -1,4 +1,3 @@
-python==3.8
 numpy
 pandas
 scikit-learn
diff --git a/starter/starter/ml/data.py b/starter/starter/ml/data.py
index b46a8f013..2932d2eae 100644
--- a/starter/starter/ml/data.py
+++ b/starter/starter/ml/data.py
@@ -64,7 +64,7 @@ def process_data(
             y = lb.transform(y.values).ravel()
         # Catch the case where y is None because we're doing inference.
         except AttributeError:
-            pass
+            y = None
 
     X = np.concatenate([X_continuous, X_categorical], axis=1)
     return X, y, encoder, lb
diff --git a/starter/starter/train_model.py b/starter/starter/train_model.py
index 2caaf4cd4..ebf72b33a 100644
--- a/starter/starter/train_model.py
+++ b/starter/starter/train_model.py
@@ -3,6 +3,7 @@
 from sklearn.model_selection import train_test_split
 
 # Add the necessary imports for the starter code.
+from ml.data import process_data
 
 # Add code to load in the data.