diff --git a/.github/workflows/java-ci.yml b/.github/workflows/java-ci.yml
index 5c4be4b..5839260 100644
--- a/.github/workflows/java-ci.yml
+++ b/.github/workflows/java-ci.yml
@@ -11,41 +11,6 @@ on:
     - main
 
 jobs:
-  ollama-e:
-    runs-on: ubuntu-22.04
-    name: ollama-e
-    steps:
-    - uses: actions/checkout@v1
-    #- uses: self-actuated/nvidia-run@master
-    - name: Install Ollama
-      run: |
-        curl -fsSL https://ollama.com/install.sh | sudo -E sh
-    - name: Start serving
-      run: |
-        # Run the background, there is no way to daemonise at the moment
-        ollama serve &
-
-        # A short pause is required before the HTTP port is opened
-        sleep 5
-
-        # This endpoint blocks until ready
-        time curl -i http://localhost:11434
-
-    - name: Pull llama3
-      run: |
-        ollama pull llama3
-
-    #- name: Invoke via the CLI
-    #  run: |
-    #    ollama run llama2 "What are the pros of MicroVMs for continous integrations, especially if Docker is the alternative?"
-
-    #- name: Invoke via API
-    #  run: |
-    #    curl -s http://localhost:11434/api/generate -d '{
-    #      "model": "llama2",
-    #      "stream": false,
-    #      "prompt":"What are the risks of running privileged Docker containers for CI workloads?"
-    #    }' | jq
   build:
     runs-on: ubuntu-22.04
     strategy:
@@ -54,24 +19,6 @@ jobs:
     name: JDK ${{ matrix.Java }} build
     steps:
     - uses: actions/checkout@v4.1.1
-    - name: Install Ollama
-      run: |
-        curl -fsSL https://ollama.com/install.sh | sudo -E sh
-    - name: Start serving
-      run: |
-        # Run the background, there is no way to daemonise at the moment
-        ollama serve &
-
-        # A short pause is required before the HTTP port is opened
-        sleep 5
-
-        # This endpoint blocks until ready
-        time curl -i http://localhost:11434
-
-    - name: Pull llama3
-      run: |
-        ollama pull llama3
-
     - name: Set up Java
       uses: actions/setup-java@v4.2.1
       with:
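
For context, a minimal sketch of how the trimmed `build` job reads once the Ollama steps are removed. Only the runner, job and step names, and action versions come from the diff's context lines; the `strategy.matrix` values and the `setup-java` inputs (`distribution`, `java-version`) are assumptions for illustration, since the diff truncates before them.

```yaml
# Sketch of the post-change build job. The matrix values and the
# setup-java "with" inputs are assumed; they are not shown in the diff.
jobs:
  build:
    runs-on: ubuntu-22.04
    strategy:
      matrix:
        Java: [11, 17]                      # assumed versions
    name: JDK ${{ matrix.Java }} build
    steps:
    - uses: actions/checkout@v4.1.1
    - name: Set up Java
      uses: actions/setup-java@v4.2.1
      with:
        distribution: temurin               # assumed distribution
        java-version: ${{ matrix.Java }}    # assumed to follow the matrix
```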